VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@106692

Last change on this file since 106692 was 106692, checked in by vboxsync, 4 weeks ago

ValidationKit/bootsectors: Implement SIMD FP testcases for cvtpi2ps, and fix it in IEM; bugref:10658; jiraref:VBP-1206

IEM:

  • fix IEM cvtpi2ps [mem] trashing the target register
  • fix IEM cvtpi2ps [mem] sometimes leaving x87/MMX FTW in the wrong state

Valkit:

  • fix Bs3ExtCtxSetReg()'s ability to load MMX registers
  • add BS3_REGISTER_IS_MMX macro
  • fix test worker's interaction with x87/MMX FTW
  • add 'RAND_x6' FP32 value bars for use in tests of these instructions
  • add wacky set of macros for injecting integers into FP data arrays
  • implement cvtpi2ps tests

/* $Id: IEMAllInstTwoByte0f.cpp.h 106692 2024-10-25 12:27:42Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 * pxxx mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
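
#if 0 /* Editor's illustrative sketch, not part of the original file: what a
       * PFNIEMAIMPLMEDIAOPTF2U64 worker passed to the function above might
       * look like, inferred from the IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst,
       * pSrc) invocation; the name and the OR operation are hypothetical. */
static void iemAImpl_example_por_u64(uint64_t *puDst, uint64_t const *puSrc)
{
    /* Operates on the two 64-bit operands only; no FXSAVE state is passed. */
    *puDst |= *puSrc;
}
#endif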


/**
 * Common worker for MMX instructions on the form:
 * pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 * pxxx mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 * pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
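
#if 0 /* Editor's illustrative sketch, not part of the original file: a worker
       * matching PFNIEMAIMPLMEDIAOPTF2U128 as invoked above; the name, the
       * AND operation, and the use of RTUINT128U's au64[] view are
       * assumptions for illustration. */
static void iemAImpl_example_pand_u128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] &= puSrc->au64[0]; /* low qword */
    puDst->au64[1] &= puSrc->au64[1]; /* high qword */
}
#endif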


/**
 * Common worker for SSE2 instructions on the forms:
 * pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
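
#if 0 /* Editor's illustrative sketch, not part of the original file: how an
       * opcode function might instantiate the body macro above, reusing the
       * hypothetical worker from the earlier sketch. Per the RT_CONCAT3
       * lines, a_Ins == pand would select iemNativeEmit_pand_rr_u128 and
       * iemNativeEmit_pand_rv_u128 on the native paths; passing 0 for both
       * arch masks makes IEM_MC_NATIVE_IF take the C worker fallback. */
FNIEMOP_DEF(iemOp_example_pand_Vx_Wx)
{
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_example_pand_u128,
                                   0 /*a_fRegNativeArchs*/, 0 /*a_fMemNativeArchs*/);
}
#endif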


/**
 * Common worker for MMX instructions on the forms:
 * pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 * pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either 64 or 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 * pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either 64 or 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 * pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the form:
 * pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access that may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 * pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
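
/*
 * Editor's note: the worker above computes into the local SseRes and only
 * commits it with IEM_MC_STORE_XREG_XMM afterwards; presumably this keeps the
 * destination register (which may alias pSrc1) unmodified until the SIMD FP
 * operation has fully completed, including any pending exception handling.
 */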


/**
 * A body preprocessor variant of iemOpCommonSseFp_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE_FP_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLFPSSEF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCX86XMMREG, pSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(X86XMMREG, uSrc2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc2); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
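
/*
 * Editor's note: as with SSE2_OPT_BODY_FullFull_To_Full, the RT_CONCAT3 lines
 * above mean that instantiating this macro with, say, a_Ins == addps selects
 * iemNativeEmit_addps_rr_u128 for the register form and
 * iemNativeEmit_addps_rv_u128 for the memory form (addps being just an example
 * mnemonic). The extra IEM_MC_LIVENESS_MXCSR_MODIFY() on the native paths
 * appears to mark MXCSR as modified, since SIMD FP instructions update its
 * status flags.
 */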


/**
 * Common worker for SSE instructions on the forms:
 * pxxs xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 * pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 * pxxs xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the form:
 * pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access that may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 * hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
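
/*
 * Editor's note: a worked decode example for the dispatcher above. For the
 * byte sequence 0F 00 D8, bRm = 0xD8 = 11 011 000b, so IEM_GET_MODRM_REG_8
 * yields 3 and g_apfnGroup6[3] dispatches to iemOp_Grp6_ltr, with rm = 0
 * selecting the AX register in register mode (i.e. "ltr ax").
 */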


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
1534 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
1535}
1536
1537/** Opcode 0x0f 0x01 0xda. */
1538#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1539FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1540{
1541 IEMOP_MNEMONIC(vmload, "vmload");
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1543 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
1544}
1545#else
1546FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1547#endif
1548
1549
1550/** Opcode 0x0f 0x01 0xdb. */
1551#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1552FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1553{
1554 IEMOP_MNEMONIC(vmsave, "vmsave");
1555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1556 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
1557}
1558#else
1559FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1560#endif
1561
1562
1563/** Opcode 0x0f 0x01 0xdc. */
1564#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1565FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1566{
1567 IEMOP_MNEMONIC(stgi, "stgi");
1568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1569 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
1570}
1571#else
1572FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1573#endif
1574
1575
1576/** Opcode 0x0f 0x01 0xdd. */
1577#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1578FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1579{
1580 IEMOP_MNEMONIC(clgi, "clgi");
1581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1582 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
1583}
1584#else
1585FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1586#endif
1587
1588
1589/** Opcode 0x0f 0x01 0xdf. */
1590#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1591FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1592{
1593 IEMOP_MNEMONIC(invlpga, "invlpga");
1594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1595 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
1596}
1597#else
1598FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1599#endif
1600
1601
1602/** Opcode 0x0f 0x01 0xde. */
1603#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1604FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1605{
1606 IEMOP_MNEMONIC(skinit, "skinit");
1607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1608 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
1609}
1610#else
1611FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1612#endif
1613
1614
1615/** Opcode 0x0f 0x01 /4. */
1616FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1617{
1618 IEMOP_MNEMONIC(smsw, "smsw");
1619 IEMOP_HLP_MIN_286();
1620 if (IEM_IS_MODRM_REG_MODE(bRm))
1621 {
1622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1623 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1624 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1625 }
1626
1627 /* Ignore operand size here, memory refs are always 16-bit. */
1628 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1629 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1632 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1633 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1634 IEM_MC_END();
1635}
1636
1637
1638/** Opcode 0x0f 0x01 /6. */
1639FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1640{
1641 /* The operand size is effectively ignored, all is 16-bit and only the
1642 lower 4 bits are used (and LMSW cannot clear CR0.PE). */
1643 IEMOP_MNEMONIC(lmsw, "lmsw");
1644 IEMOP_HLP_MIN_286();
1645 if (IEM_IS_MODRM_REG_MODE(bRm))
1646 {
1647 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1649 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1650 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1651 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1652 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1653 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1654 IEM_MC_END();
1655 }
1656 else
1657 {
1658 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1659 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1660 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1663 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1664 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1665 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1666 IEM_MC_END();
1667 }
1668}
1669
1670
1671/** Opcode 0x0f 0x01 /7. */
1672FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1673{
1674 IEMOP_MNEMONIC(invlpg, "invlpg");
1675 IEMOP_HLP_MIN_486();
1676 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1677 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1680 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1681 IEM_MC_END();
1682}
1683
1684
1685/** Opcode 0x0f 0x01 0xf8. */
1686FNIEMOP_DEF(iemOp_Grp7_swapgs)
1687{
1688 IEMOP_MNEMONIC(swapgs, "swapgs");
1689 IEMOP_HLP_ONLY_64BIT();
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1691 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1692}
1693
1694
1695/** Opcode 0x0f 0x01 0xf9. */
1696FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1697{
1698 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1700 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1703 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1704 iemCImpl_rdtscp);
1705}
1706
1707
1708/**
1709 * Group 7 jump table, memory variant.
1710 */
1711IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1712{
1713 iemOp_Grp7_sgdt,
1714 iemOp_Grp7_sidt,
1715 iemOp_Grp7_lgdt,
1716 iemOp_Grp7_lidt,
1717 iemOp_Grp7_smsw,
1718 iemOp_InvalidWithRM,
1719 iemOp_Grp7_lmsw,
1720 iemOp_Grp7_invlpg
1721};
1722
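/*
 * Note on the dispatch below: it switches first on ModR/M.reg (bits 5:3)
 * and, for the register form, also on ModR/M.rm (bits 2:0).  A standalone
 * sketch of the field extraction, for illustration only (plain C, not the
 * IEM macros):
 *
 *      uint8_t  const bRm  = 0xd9;            // example: 0f 01 d9 = vmmcall
 *      unsigned const uMod = bRm >> 6;        // 3 -> register operand form
 *      unsigned const uReg = (bRm >> 3) & 7;  // 3 -> the AMD SVM sub-group
 *      unsigned const uRm  = bRm & 7;         // 1 -> vmmcall
 *
 * All the memory forms are dispatched via g_apfnGroup7Mem above, indexed by
 * the reg field.
 */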
1723
1724/** Opcode 0x0f 0x01. */
1725FNIEMOP_DEF(iemOp_Grp7)
1726{
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if (IEM_IS_MODRM_MEM_MODE(bRm))
1729 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1730
1731 switch (IEM_GET_MODRM_REG_8(bRm))
1732 {
1733 case 0:
1734 switch (IEM_GET_MODRM_RM_8(bRm))
1735 {
1736 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1737 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1738 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1739 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1740 }
1741 IEMOP_RAISE_INVALID_OPCODE_RET();
1742
1743 case 1:
1744 switch (IEM_GET_MODRM_RM_8(bRm))
1745 {
1746 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1747 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1748 }
1749 IEMOP_RAISE_INVALID_OPCODE_RET();
1750
1751 case 2:
1752 switch (IEM_GET_MODRM_RM_8(bRm))
1753 {
1754 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1755 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1756 }
1757 IEMOP_RAISE_INVALID_OPCODE_RET();
1758
1759 case 3:
1760 switch (IEM_GET_MODRM_RM_8(bRm))
1761 {
1762 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1763 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1764 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1765 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1766 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1767 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1768 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1769 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1770 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1771 }
1772
1773 case 4:
1774 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1775
1776 case 5:
1777 IEMOP_RAISE_INVALID_OPCODE_RET();
1778
1779 case 6:
1780 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1781
1782 case 7:
1783 switch (IEM_GET_MODRM_RM_8(bRm))
1784 {
1785 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1786 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1787 }
1788 IEMOP_RAISE_INVALID_OPCODE_RET();
1789
1790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1791 }
1792}
1793
1794FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1795{
1796 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798
1799 if (IEM_IS_MODRM_REG_MODE(bRm))
1800 {
1801 switch (pVCpu->iem.s.enmEffOpSize)
1802 {
1803 case IEMMODE_16BIT:
1804 IEM_MC_BEGIN(0, 0);
1805 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1807 IEM_MC_ARG(uint16_t, u16Sel, 1);
1808 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1809
1810 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1811 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1812 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1813 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1814
1815 IEM_MC_END();
1816 break;
1817
1818 case IEMMODE_32BIT:
1819 case IEMMODE_64BIT:
1820 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1821 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1822 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1823 IEM_MC_ARG(uint16_t, u16Sel, 1);
1824 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1825
1826 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1827 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1828 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1829 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1830
1831 IEM_MC_END();
1832 break;
1833
1834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1835 }
1836 }
1837 else
1838 {
1839 switch (pVCpu->iem.s.enmEffOpSize)
1840 {
1841 case IEMMODE_16BIT:
1842 IEM_MC_BEGIN(0, 0);
1843 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1844 IEM_MC_ARG(uint16_t, u16Sel, 1);
1845 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1850
1851 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1852 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1853 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1854 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1855
1856 IEM_MC_END();
1857 break;
1858
1859 case IEMMODE_32BIT:
1860 case IEMMODE_64BIT:
1861 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1862 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1863 IEM_MC_ARG(uint16_t, u16Sel, 1);
1864 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866
1867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869/** @todo testcase: make sure it's a 16-bit read. */
1870
1871 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1872 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1873 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1874 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1875
1876 IEM_MC_END();
1877 break;
1878
1879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1880 }
1881 }
1882}
1883
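/*
 * LAR and LSL share the worker above; only the fIsLar argument and thus the
 * cimpl worker differ.  Architecturally both only modify ZF: it is set when
 * the selector/descriptor checks pass and cleared otherwise, in which case
 * the destination register is left untouched.  Guest-side illustration (not
 * IEM code):
 *
 *      lar eax, bx     ; eax = access rights of selector bx, ZF=1 on success
 *      lsl eax, bx     ; eax = segment limit of selector bx, ZF=1 on success
 */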
1884
1885
1886/**
1887 * @opcode 0x02
1888 * @opflmodify zf
1889 */
1890FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1891{
1892 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1893 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1894}
1895
1896
1897/**
1898 * @opcode 0x03
1899 * @opflmodify zf
1900 */
1901FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1902{
1903 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1904 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1905}
1906
1907
1908/** Opcode 0x0f 0x05. */
1909FNIEMOP_DEF(iemOp_syscall)
1910{
1911 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1912 {
1913 IEMOP_MNEMONIC(syscall, "syscall");
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1916 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1917 }
1918 else
1919 {
1920 IEMOP_MNEMONIC(loadall286, "loadall286");
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1923 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1924 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1925 }
1926}
1927
1928
1929/** Opcode 0x0f 0x06. */
1930FNIEMOP_DEF(iemOp_clts)
1931{
1932 IEMOP_MNEMONIC(clts, "clts");
1933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1934 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1935}
1936
1937
1938/** Opcode 0x0f 0x07. */
1939FNIEMOP_DEF(iemOp_sysret)
1940{
1941 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1945 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1946}
1947
1948
1949/** Opcode 0x0f 0x08. */
1950FNIEMOP_DEF(iemOp_invd)
1951{
1952 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1953 IEMOP_HLP_MIN_486();
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1956}
1957
1958
1959/** Opcode 0x0f 0x09. */
1960FNIEMOP_DEF(iemOp_wbinvd)
1961{
1962 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1963 IEMOP_HLP_MIN_486();
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1966}
1967
1968
1969/** Opcode 0x0f 0x0b. */
1970FNIEMOP_DEF(iemOp_ud2)
1971{
1972 IEMOP_MNEMONIC(ud2, "ud2");
1973 IEMOP_RAISE_INVALID_OPCODE_RET();
1974}
1975
1976/** Opcode 0x0f 0x0d. */
1977FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1978{
1979 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1981 {
1982 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1983 IEMOP_RAISE_INVALID_OPCODE_RET();
1984 }
1985
1986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1987 if (IEM_IS_MODRM_REG_MODE(bRm))
1988 {
1989 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1990 IEMOP_RAISE_INVALID_OPCODE_RET();
1991 }
1992
1993 switch (IEM_GET_MODRM_REG_8(bRm))
1994 {
1995 case 2: /* Aliased to /0 for the time being. */
1996 case 4: /* Aliased to /0 for the time being. */
1997 case 5: /* Aliased to /0 for the time being. */
1998 case 6: /* Aliased to /0 for the time being. */
1999 case 7: /* Aliased to /0 for the time being. */
2000 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2001 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2002 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2004 }
2005
2006 IEM_MC_BEGIN(0, 0);
2007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 /* Currently a NOP. */
2011 IEM_MC_NOREF(GCPtrEffSrc);
2012 IEM_MC_ADVANCE_RIP_AND_FINISH();
2013 IEM_MC_END();
2014}
2015
2016
2017/** Opcode 0x0f 0x0e. */
2018FNIEMOP_DEF(iemOp_femms)
2019{
2020 IEMOP_MNEMONIC(femms, "femms");
2021
2022 IEM_MC_BEGIN(0, 0);
2023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2027 IEM_MC_FPU_FROM_MMX_MODE();
2028 IEM_MC_ADVANCE_RIP_AND_FINISH();
2029 IEM_MC_END();
2030}
2031
2032
2033/** Opcode 0x0f 0x0f. */
2034FNIEMOP_DEF(iemOp_3Dnow)
2035{
2036 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2037 {
2038 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2039 IEMOP_RAISE_INVALID_OPCODE_RET();
2040 }
2041
2042#ifdef IEM_WITH_3DNOW
2043 /* This is pretty sparse, use switch instead of table. */
2044 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2045 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2046#else
2047 IEMOP_BITCH_ABOUT_STUB();
2048 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2049#endif
2050}
2051
2052
2053/**
2054 * @opcode 0x10
2055 * @oppfx none
2056 * @opcpuid sse
2057 * @opgroup og_sse_simdfp_datamove
2058 * @opxcpttype 4UA
2059 * @optest op1=1 op2=2 -> op1=2
2060 * @optest op1=0 op2=-22 -> op1=-22
2061 */
2062FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2063{
2064 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2066 if (IEM_IS_MODRM_REG_MODE(bRm))
2067 {
2068 /*
2069 * XMM128, XMM128.
2070 */
2071 IEM_MC_BEGIN(0, 0);
2072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2073 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2075 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2076 IEM_GET_MODRM_RM(pVCpu, bRm));
2077 IEM_MC_ADVANCE_RIP_AND_FINISH();
2078 IEM_MC_END();
2079 }
2080 else
2081 {
2082 /*
2083 * XMM128, [mem128].
2084 */
2085 IEM_MC_BEGIN(0, 0);
2086 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2088
2089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2093
2094 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2095 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2096
2097 IEM_MC_ADVANCE_RIP_AND_FINISH();
2098 IEM_MC_END();
2099 }
2101}
2102
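/*
 * Alignment note: the movups/movupd memory forms above fetch with
 * IEM_MC_FETCH_MEM_U128_NO_AC, i.e. no alignment restriction, whereas the
 * movaps/movapd variants further down use IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * and thus implement the 16-byte alignment check of the aligned moves.
 * Guest-side illustration (not IEM code):
 *
 *      movups xmm0, [rax]      ; any address is fine
 *      movaps xmm0, [rax]      ; #GP(0) unless rax is 16-byte aligned
 */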
2103
2104/**
2105 * @opcode 0x10
2106 * @oppfx 0x66
2107 * @opcpuid sse2
2108 * @opgroup og_sse2_pcksclr_datamove
2109 * @opxcpttype 4UA
2110 * @optest op1=1 op2=2 -> op1=2
2111 * @optest op1=0 op2=-42 -> op1=-42
2112 */
2113FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2114{
2115 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if (IEM_IS_MODRM_REG_MODE(bRm))
2118 {
2119 /*
2120 * XMM128, XMM128.
2121 */
2122 IEM_MC_BEGIN(0, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2127 IEM_GET_MODRM_RM(pVCpu, bRm));
2128 IEM_MC_ADVANCE_RIP_AND_FINISH();
2129 IEM_MC_END();
2130 }
2131 else
2132 {
2133 /*
2134 * XMM128, [mem128].
2135 */
2136 IEM_MC_BEGIN(0, 0);
2137 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2139
2140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2144
2145 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2146 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2147
2148 IEM_MC_ADVANCE_RIP_AND_FINISH();
2149 IEM_MC_END();
2150 }
2151}
2152
2153
2154/**
2155 * @opcode 0x10
2156 * @oppfx 0xf3
2157 * @opcpuid sse
2158 * @opgroup og_sse_simdfp_datamove
2159 * @opxcpttype 5
2160 * @optest op1=1 op2=2 -> op1=2
2161 * @optest op1=0 op2=-22 -> op1=-22
2162 */
2163FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2164{
2165 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2167 if (IEM_IS_MODRM_REG_MODE(bRm))
2168 {
2169 /*
2170 * XMM32, XMM32.
2171 */
2172 IEM_MC_BEGIN(0, 0);
2173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2174 IEM_MC_LOCAL(uint32_t, uSrc);
2175
2176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2178 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2179 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 else
2185 {
2186 /*
2187 * XMM128, [mem32].
2188 */
2189 IEM_MC_BEGIN(0, 0);
2190 IEM_MC_LOCAL(uint32_t, uSrc);
2191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2192
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2196 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2197
2198 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2199 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2200
2201 IEM_MC_ADVANCE_RIP_AND_FINISH();
2202 IEM_MC_END();
2203 }
2204}
2205
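/*
 * Note the asymmetry implemented above (and mirrored by movsd below): the
 * register form replaces only dword 0 of the destination and preserves the
 * rest, while the memory form goes via IEM_MC_STORE_XREG_U32_ZX_U128 and
 * zeroes dwords 1..3.  Guest-side illustration (not IEM code):
 *
 *      movss xmm0, xmm1        ; xmm0[31:0] = xmm1[31:0], rest of xmm0 kept
 *      movss xmm0, [mem32]     ; xmm0[31:0] = [mem32], xmm0[127:32] = 0
 */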
2206
2207/**
2208 * @opcode 0x10
2209 * @oppfx 0xf2
2210 * @opcpuid sse2
2211 * @opgroup og_sse2_pcksclr_datamove
2212 * @opxcpttype 5
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 */
2216FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2217{
2218 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 if (IEM_IS_MODRM_REG_MODE(bRm))
2221 {
2222 /*
2223 * XMM64, XMM64.
2224 */
2225 IEM_MC_BEGIN(0, 0);
2226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2227 IEM_MC_LOCAL(uint64_t, uSrc);
2228
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2232 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237 else
2238 {
2239 /*
2240 * XMM128, [mem64].
2241 */
2242 IEM_MC_BEGIN(0, 0);
2243 IEM_MC_LOCAL(uint64_t, uSrc);
2244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2245
2246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2248 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2250
2251 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2252 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2253
2254 IEM_MC_ADVANCE_RIP_AND_FINISH();
2255 IEM_MC_END();
2256 }
2257}
2258
2259
2260/**
2261 * @opcode 0x11
2262 * @oppfx none
2263 * @opcpuid sse
2264 * @opgroup og_sse_simdfp_datamove
2265 * @opxcpttype 4UA
2266 * @optest op1=1 op2=2 -> op1=2
2267 * @optest op1=0 op2=-42 -> op1=-42
2268 */
2269FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2270{
2271 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2273 if (IEM_IS_MODRM_REG_MODE(bRm))
2274 {
2275 /*
2276 * XMM128, XMM128.
2277 */
2278 IEM_MC_BEGIN(0, 0);
2279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2282 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2283 IEM_GET_MODRM_REG(pVCpu, bRm));
2284 IEM_MC_ADVANCE_RIP_AND_FINISH();
2285 IEM_MC_END();
2286 }
2287 else
2288 {
2289 /*
2290 * [mem128], XMM128.
2291 */
2292 IEM_MC_BEGIN(0, 0);
2293 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2295
2296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2300
2301 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2303
2304 IEM_MC_ADVANCE_RIP_AND_FINISH();
2305 IEM_MC_END();
2306 }
2307}
2308
2309
2310/**
2311 * @opcode 0x11
2312 * @oppfx 0x66
2313 * @opcpuid sse2
2314 * @opgroup og_sse2_pcksclr_datamove
2315 * @opxcpttype 4UA
2316 * @optest op1=1 op2=2 -> op1=2
2317 * @optest op1=0 op2=-42 -> op1=-42
2318 */
2319FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2320{
2321 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2323 if (IEM_IS_MODRM_REG_MODE(bRm))
2324 {
2325 /*
2326 * XMM128, XMM128.
2327 */
2328 IEM_MC_BEGIN(0, 0);
2329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2333 IEM_GET_MODRM_REG(pVCpu, bRm));
2334 IEM_MC_ADVANCE_RIP_AND_FINISH();
2335 IEM_MC_END();
2336 }
2337 else
2338 {
2339 /*
2340 * [mem128], XMM128.
2341 */
2342 IEM_MC_BEGIN(0, 0);
2343 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2345
2346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2350
2351 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2352 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2353
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 }
2357}
2358
2359
2360/**
2361 * @opcode 0x11
2362 * @oppfx 0xf3
2363 * @opcpuid sse
2364 * @opgroup og_sse_simdfp_datamove
2365 * @opxcpttype 5
2366 * @optest op1=1 op2=2 -> op1=2
2367 * @optest op1=0 op2=-22 -> op1=-22
2368 */
2369FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2370{
2371 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2373 if (IEM_IS_MODRM_REG_MODE(bRm))
2374 {
2375 /*
2376 * XMM32, XMM32.
2377 */
2378 IEM_MC_BEGIN(0, 0);
2379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2380 IEM_MC_LOCAL(uint32_t, uSrc);
2381
2382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2385 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390 else
2391 {
2392 /*
2393 * [mem32], XMM32.
2394 */
2395 IEM_MC_BEGIN(0, 0);
2396 IEM_MC_LOCAL(uint32_t, uSrc);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2403
2404 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2405 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2406
2407 IEM_MC_ADVANCE_RIP_AND_FINISH();
2408 IEM_MC_END();
2409 }
2410}
2411
2412
2413/**
2414 * @opcode 0x11
2415 * @oppfx 0xf2
2416 * @opcpuid sse2
2417 * @opgroup og_sse2_pcksclr_datamove
2418 * @opxcpttype 5
2419 * @optest op1=1 op2=2 -> op1=2
2420 * @optest op1=0 op2=-42 -> op1=-42
2421 */
2422FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2423{
2424 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2426 if (IEM_IS_MODRM_REG_MODE(bRm))
2427 {
2428 /*
2429 * XMM64, XMM64.
2430 */
2431 IEM_MC_BEGIN(0, 0);
2432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2433 IEM_MC_LOCAL(uint64_t, uSrc);
2434
2435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2437 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2438 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443 else
2444 {
2445 /*
2446 * [mem64], XMM64.
2447 */
2448 IEM_MC_BEGIN(0, 0);
2449 IEM_MC_LOCAL(uint64_t, uSrc);
2450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2451
2452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2456
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2459
2460 IEM_MC_ADVANCE_RIP_AND_FINISH();
2461 IEM_MC_END();
2462 }
2463}
2464
2465
2466FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2467{
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 if (IEM_IS_MODRM_REG_MODE(bRm))
2470 {
2471 /**
2472 * @opcode 0x12
2473 * @opcodesub 11 mr/reg
2474 * @oppfx none
2475 * @opcpuid sse
2476 * @opgroup og_sse_simdfp_datamove
2477 * @opxcpttype 5
2478 * @optest op1=1 op2=2 -> op1=2
2479 * @optest op1=0 op2=-42 -> op1=-42
2480 */
2481 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2482
2483 IEM_MC_BEGIN(0, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2489 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2490 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /**
2498 * @opdone
2499 * @opcode 0x12
2500 * @opcodesub !11 mr/reg
2501 * @oppfx none
2502 * @opcpuid sse
2503 * @opgroup og_sse_simdfp_datamove
2504 * @opxcpttype 5
2505 * @optest op1=1 op2=2 -> op1=2
2506 * @optest op1=0 op2=-42 -> op1=-42
2507 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2508 */
2509 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2510
2511 IEM_MC_BEGIN(0, 0);
2512 IEM_MC_LOCAL(uint64_t, uSrc);
2513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2514
2515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2519
2520 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2521 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2522
2523 IEM_MC_ADVANCE_RIP_AND_FINISH();
2524 IEM_MC_END();
2525 }
2526}
2527
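/*
 * Lane picture for the register form above (MOVHLPS), illustration only:
 *
 *      dst.qword[0] = src.qword[1];    // high qword of source -> low of dst
 *      // dst.qword[1] is left unchanged
 *
 * The memory form (MOVLPS) instead loads dst.qword[0] straight from [mem64],
 * likewise leaving dst.qword[1] untouched.
 */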
2528
2529/**
2530 * @opcode 0x12
2531 * @opcodesub !11 mr/reg
2532 * @oppfx 0x66
2533 * @opcpuid sse2
2534 * @opgroup og_sse2_pcksclr_datamove
2535 * @opxcpttype 5
2536 * @optest op1=1 op2=2 -> op1=2
2537 * @optest op1=0 op2=-42 -> op1=-42
2538 */
2539FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2540{
2541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2542 if (IEM_IS_MODRM_MEM_MODE(bRm))
2543 {
2544 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2545
2546 IEM_MC_BEGIN(0, 0);
2547 IEM_MC_LOCAL(uint64_t, uSrc);
2548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2549
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2552 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2554
2555 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2556 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2557
2558 IEM_MC_ADVANCE_RIP_AND_FINISH();
2559 IEM_MC_END();
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 else
2573 IEMOP_RAISE_INVALID_OPCODE_RET();
2574}
2575
2576
2577/**
2578 * @opcode 0x12
2579 * @oppfx 0xf3
2580 * @opcpuid sse3
2581 * @opgroup og_sse3_pcksclr_datamove
2582 * @opxcpttype 4
2583 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2584 * op1=0x00000002000000020000000100000001
2585 */
2586FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2587{
2588 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2590 if (IEM_IS_MODRM_REG_MODE(bRm))
2591 {
2592 /*
2593 * XMM, XMM.
2594 */
2595 IEM_MC_BEGIN(0, 0);
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2597 IEM_MC_LOCAL(RTUINT128U, uSrc);
2598
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2606 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * XMM, [mem128].
2615 */
2616 IEM_MC_BEGIN(0, 0);
2617 IEM_MC_LOCAL(RTUINT128U, uSrc);
2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2627 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2628 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2629 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2630
2631 IEM_MC_ADVANCE_RIP_AND_FINISH();
2632 IEM_MC_END();
2633 }
2634}
2635
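/*
 * Dword replication pattern implemented above, illustration only:
 *
 *      dst.dword[0] = src.dword[0];
 *      dst.dword[1] = src.dword[0];
 *      dst.dword[2] = src.dword[2];
 *      dst.dword[3] = src.dword[2];
 *
 * movshdup (f3 0f 16, further down) does the same with the odd source dwords
 * 1 and 3, while movddup (below) duplicates source qword 0 into both qwords.
 */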
2636
2637/**
2638 * @opcode 0x12
2639 * @oppfx 0xf2
2640 * @opcpuid sse3
2641 * @opgroup og_sse3_pcksclr_datamove
2642 * @opxcpttype 5
2643 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2644 * op1=0x22222222111111112222222211111111
2645 */
2646FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2647{
2648 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2650 if (IEM_IS_MODRM_REG_MODE(bRm))
2651 {
2652 /*
2653 * XMM128, XMM64.
2654 */
2655 IEM_MC_BEGIN(0, 0);
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2663 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2664 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP_AND_FINISH();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * XMM128, [mem64].
2673 */
2674 IEM_MC_BEGIN(0, 0);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_LOCAL(uint64_t, uSrc);
2677
2678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2684 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2685 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP_AND_FINISH();
2688 IEM_MC_END();
2689 }
2690}
2691
2692
2693/**
2694 * @opcode 0x13
2695 * @opcodesub !11 mr/reg
2696 * @oppfx none
2697 * @opcpuid sse
2698 * @opgroup og_sse_simdfp_datamove
2699 * @opxcpttype 5
2700 * @optest op1=1 op2=2 -> op1=2
2701 * @optest op1=0 op2=-42 -> op1=-42
2702 */
2703FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2704{
2705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2706 if (IEM_IS_MODRM_MEM_MODE(bRm))
2707 {
2708 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2709
2710 IEM_MC_BEGIN(0, 0);
2711 IEM_MC_LOCAL(uint64_t, uSrc);
2712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2713
2714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2718
2719 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2720 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2721
2722 IEM_MC_ADVANCE_RIP_AND_FINISH();
2723 IEM_MC_END();
2724 }
2725
2726 /**
2727 * @opdone
2728 * @opmnemonic ud0f13m3
2729 * @opcode 0x13
2730 * @opcodesub 11 mr/reg
2731 * @oppfx none
2732 * @opunused immediate
2733 * @opcpuid sse
2734 * @optest ->
2735 */
2736 else
2737 IEMOP_RAISE_INVALID_OPCODE_RET();
2738}
2739
2740
2741/**
2742 * @opcode 0x13
2743 * @opcodesub !11 mr/reg
2744 * @oppfx 0x66
2745 * @opcpuid sse2
2746 * @opgroup og_sse2_pcksclr_datamove
2747 * @opxcpttype 5
2748 * @optest op1=1 op2=2 -> op1=2
2749 * @optest op1=0 op2=-42 -> op1=-42
2750 */
2751FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2752{
2753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2754 if (IEM_IS_MODRM_MEM_MODE(bRm))
2755 {
2756 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2757
2758 IEM_MC_BEGIN(0, 0);
2759 IEM_MC_LOCAL(uint64_t, uSrc);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2766
2767 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2768 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2769
2770 IEM_MC_ADVANCE_RIP_AND_FINISH();
2771 IEM_MC_END();
2772 }
2773
2774 /**
2775 * @opdone
2776 * @opmnemonic ud660f13m3
2777 * @opcode 0x13
2778 * @opcodesub 11 mr/reg
2779 * @oppfx 0x66
2780 * @opunused immediate
2781 * @opcpuid sse
2782 * @optest ->
2783 */
2784 else
2785 IEMOP_RAISE_INVALID_OPCODE_RET();
2786}
2787
2788
2789/**
2790 * @opmnemonic udf30f13
2791 * @opcode 0x13
2792 * @oppfx 0xf3
2793 * @opunused intel-modrm
2794 * @opcpuid sse
2795 * @optest ->
2796 * @opdone
2797 */
2798
2799/**
2800 * @opmnemonic udf20f13
2801 * @opcode 0x13
2802 * @oppfx 0xf2
2803 * @opunused intel-modrm
2804 * @opcpuid sse
2805 * @optest ->
2806 * @opdone
2807 */
2808
2809/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2810FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2811{
2812 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2813 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2814}
2815
2816
2817/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2818FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2819{
2820 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2821 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2822}
2823
2824
2825/**
2826 * @opdone
2827 * @opmnemonic udf30f14
2828 * @opcode 0x14
2829 * @oppfx 0xf3
2830 * @opunused intel-modrm
2831 * @opcpuid sse
2832 * @optest ->
2833 * @opdone
2834 */
2835
2836/**
2837 * @opmnemonic udf20f14
2838 * @opcode 0x14
2839 * @oppfx 0xf2
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2847FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2848{
2849 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2850 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2851}
2852
2853
2854/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2855FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2859}
2860
2861
2862/* Opcode 0xf3 0x0f 0x15 - invalid */
2863/* Opcode 0xf2 0x0f 0x15 - invalid */
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f15
2868 * @opcode 0x15
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f15
2878 * @opcode 0x15
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
2886FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2887{
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2889 if (IEM_IS_MODRM_REG_MODE(bRm))
2890 {
2891 /**
2892 * @opcode 0x16
2893 * @opcodesub 11 mr/reg
2894 * @oppfx none
2895 * @opcpuid sse
2896 * @opgroup og_sse_simdfp_datamove
2897 * @opxcpttype 5
2898 * @optest op1=1 op2=2 -> op1=2
2899 * @optest op1=0 op2=-42 -> op1=-42
2900 */
2901 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2902
2903 IEM_MC_BEGIN(0, 0);
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2905 IEM_MC_LOCAL(uint64_t, uSrc);
2906
2907 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2909 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2910 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2911
2912 IEM_MC_ADVANCE_RIP_AND_FINISH();
2913 IEM_MC_END();
2914 }
2915 else
2916 {
2917 /**
2918 * @opdone
2919 * @opcode 0x16
2920 * @opcodesub !11 mr/reg
2921 * @oppfx none
2922 * @opcpuid sse
2923 * @opgroup og_sse_simdfp_datamove
2924 * @opxcpttype 5
2925 * @optest op1=1 op2=2 -> op1=2
2926 * @optest op1=0 op2=-42 -> op1=-42
2927 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2928 */
2929 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2930
2931 IEM_MC_BEGIN(0, 0);
2932 IEM_MC_LOCAL(uint64_t, uSrc);
2933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2934
2935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2939
2940 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2941 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2942
2943 IEM_MC_ADVANCE_RIP_AND_FINISH();
2944 IEM_MC_END();
2945 }
2946}
2947
2948
2949/**
2950 * @opcode 0x16
2951 * @opcodesub !11 mr/reg
2952 * @oppfx 0x66
2953 * @opcpuid sse2
2954 * @opgroup og_sse2_pcksclr_datamove
2955 * @opxcpttype 5
2956 * @optest op1=1 op2=2 -> op1=2
2957 * @optest op1=0 op2=-42 -> op1=-42
2958 */
2959FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2960{
2961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2962 if (IEM_IS_MODRM_MEM_MODE(bRm))
2963 {
2964 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2965
2966 IEM_MC_BEGIN(0, 0);
2967 IEM_MC_LOCAL(uint64_t, uSrc);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2972 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2974
2975 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2976 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2977
2978 IEM_MC_ADVANCE_RIP_AND_FINISH();
2979 IEM_MC_END();
2980 }
2981
2982 /**
2983 * @opdone
2984 * @opmnemonic ud660f16m3
2985 * @opcode 0x16
2986 * @opcodesub 11 mr/reg
2987 * @oppfx 0x66
2988 * @opunused immediate
2989 * @opcpuid sse
2990 * @optest ->
2991 */
2992 else
2993 IEMOP_RAISE_INVALID_OPCODE_RET();
2994}
2995
2996
2997/**
2998 * @opcode 0x16
2999 * @oppfx 0xf3
3000 * @opcpuid sse3
3001 * @opgroup og_sse3_pcksclr_datamove
3002 * @opxcpttype 4
3003 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3004 * op1=0x00000002000000020000000100000001
3005 */
3006FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3007{
3008 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3010 if (IEM_IS_MODRM_REG_MODE(bRm))
3011 {
3012 /*
3013 * XMM128, XMM128.
3014 */
3015 IEM_MC_BEGIN(0, 0);
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3017 IEM_MC_LOCAL(RTUINT128U, uSrc);
3018
3019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3020 IEM_MC_PREPARE_SSE_USAGE();
3021
3022 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3023 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3024 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3025 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3026 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3027
3028 IEM_MC_ADVANCE_RIP_AND_FINISH();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /*
3034 * XMM128, [mem128].
3035 */
3036 IEM_MC_BEGIN(0, 0);
3037 IEM_MC_LOCAL(RTUINT128U, uSrc);
3038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3039
3040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3042 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3046 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3047 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3048 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3049 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3050
3051 IEM_MC_ADVANCE_RIP_AND_FINISH();
3052 IEM_MC_END();
3053 }
3054}
3055
3056/**
3057 * @opdone
3058 * @opmnemonic udf20f16
3059 * @opcode 0x16
3060 * @oppfx 0xf2
3061 * @opunused intel-modrm
3062 * @opcpuid sse
3063 * @optest ->
3064 * @opdone
3065 */
3066
3067
3068/**
3069 * @opcode 0x17
3070 * @opcodesub !11 mr/reg
3071 * @oppfx none
3072 * @opcpuid sse
3073 * @opgroup og_sse_simdfp_datamove
3074 * @opxcpttype 5
3075 * @optest op1=1 op2=2 -> op1=2
3076 * @optest op1=0 op2=-42 -> op1=-42
3077 */
3078FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3079{
3080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3081 if (IEM_IS_MODRM_MEM_MODE(bRm))
3082 {
3083 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3084
3085 IEM_MC_BEGIN(0, 0);
3086 IEM_MC_LOCAL(uint64_t, uSrc);
3087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3088
3089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3093
3094 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3096
3097 IEM_MC_ADVANCE_RIP_AND_FINISH();
3098 IEM_MC_END();
3099 }
3100
3101 /**
3102 * @opdone
3103 * @opmnemonic ud0f17m3
3104 * @opcode 0x17
3105 * @opcodesub 11 mr/reg
3106 * @oppfx none
3107 * @opunused immediate
3108 * @opcpuid sse
3109 * @optest ->
3110 */
3111 else
3112 IEMOP_RAISE_INVALID_OPCODE_RET();
3113}
3114
3115
3116/**
3117 * @opcode 0x17
3118 * @opcodesub !11 mr/reg
3119 * @oppfx 0x66
3120 * @opcpuid sse2
3121 * @opgroup og_sse2_pcksclr_datamove
3122 * @opxcpttype 5
3123 * @optest op1=1 op2=2 -> op1=2
3124 * @optest op1=0 op2=-42 -> op1=-42
3125 */
3126FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if (IEM_IS_MODRM_MEM_MODE(bRm))
3130 {
3131 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3132
3133 IEM_MC_BEGIN(0, 0);
3134 IEM_MC_LOCAL(uint64_t, uSrc);
3135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3136
3137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3139 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3141
3142 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3143 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3144
3145 IEM_MC_ADVANCE_RIP_AND_FINISH();
3146 IEM_MC_END();
3147 }
3148
3149 /**
3150 * @opdone
3151 * @opmnemonic ud660f17m3
3152 * @opcode 0x17
3153 * @opcodesub 11 mr/reg
3154 * @oppfx 0x66
3155 * @opunused immediate
3156 * @opcpuid sse
3157 * @optest ->
3158 */
3159 else
3160 IEMOP_RAISE_INVALID_OPCODE_RET();
3161}
3162
3163
3164/**
3165 * @opdone
3166 * @opmnemonic udf30f17
3167 * @opcode 0x17
3168 * @oppfx 0xf3
3169 * @opunused intel-modrm
3170 * @opcpuid sse
3171 * @optest ->
3172 * @opdone
3173 */
3174
3175/**
3176 * @opmnemonic udf20f17
3177 * @opcode 0x17
3178 * @oppfx 0xf2
3179 * @opunused intel-modrm
3180 * @opcpuid sse
3181 * @optest ->
3182 * @opdone
3183 */
3184
3185
3186/** Opcode 0x0f 0x18. */
3187FNIEMOP_DEF(iemOp_prefetch_Grp16)
3188{
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_MEM_MODE(bRm))
3191 {
3192 switch (IEM_GET_MODRM_REG_8(bRm))
3193 {
3194 case 4: /* Aliased to /0 for the time being according to AMD. */
3195 case 5: /* Aliased to /0 for the time being according to AMD. */
3196 case 6: /* Aliased to /0 for the time being according to AMD. */
3197 case 7: /* Aliased to /0 for the time being according to AMD. */
3198 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3199 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3200 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3201 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3203 }
3204
3205 IEM_MC_BEGIN(0, 0);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3209 /* Currently a NOP. */
3210 IEM_MC_NOREF(GCPtrEffSrc);
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 IEMOP_RAISE_INVALID_OPCODE_RET();
3216}
3217
3218
3219/** Opcode 0x0f 0x19..0x1f. */
3220FNIEMOP_DEF(iemOp_nop_Ev)
3221{
3222 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3224 if (IEM_IS_MODRM_REG_MODE(bRm))
3225 {
3226 IEM_MC_BEGIN(0, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ADVANCE_RIP_AND_FINISH();
3229 IEM_MC_END();
3230 }
3231 else
3232 {
3233 IEM_MC_BEGIN(0, 0);
3234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 /* Currently a NOP. */
3238 IEM_MC_NOREF(GCPtrEffSrc);
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/** Opcode 0x0f 0x20. */
3246FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3247{
3248 /* mod is ignored, as are operand-size overrides. */
3249/** @todo testcase: check memory encoding. */
3250 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3251 IEMOP_HLP_MIN_386();
3252 if (IEM_IS_64BIT_CODE(pVCpu))
3253 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3254 else
3255 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3256
3257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3258 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3259 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3260 {
3261 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3262 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3263 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3264 iCrReg |= 8;
3265 }
3266 switch (iCrReg)
3267 {
3268 case 0: case 2: case 3: case 4: case 8:
3269 break;
3270 default:
3271 IEMOP_RAISE_INVALID_OPCODE_RET();
3272 }
3273 IEMOP_HLP_DONE_DECODING();
3274
3275 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3276 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3277 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3278}
3279
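/*
 * Encoding note for the CR8 trick handled above: without REX there is no way
 * to encode CR8 in ModR/M.reg, so some AMD CPUs (fMovCr8In32Bit) accept a
 * LOCK prefix as an alternative encoding outside 64-bit mode.  Illustration
 * only (not IEM code):
 *
 *      0f 20 c0        ; mov eax, cr0
 *      f0 0f 20 c0     ; lock mov eax, cr0 = mov eax, cr8 on such CPUs
 */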
3280
3281/** Opcode 0x0f 0x21. */
3282FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3283{
3284/** @todo testcase: check memory encoding. */
3285 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3286 IEMOP_HLP_MIN_386();
3287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3290 IEMOP_RAISE_INVALID_OPCODE_RET();
3291 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3292 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3293 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3294}
3295
3296
3297/** Opcode 0x0f 0x22. */
3298FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3299{
3300 /* mod is ignored, as are operand-size overrides. */
3301 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3302 IEMOP_HLP_MIN_386();
3303 if (IEM_IS_64BIT_CODE(pVCpu))
3304 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3305 else
3306 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3307
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3311 {
3312 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3313 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3314 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3315 iCrReg |= 8;
3316 }
3317 switch (iCrReg)
3318 {
3319 case 0: case 2: case 3: case 4: case 8:
3320 break;
3321 default:
3322 IEMOP_RAISE_INVALID_OPCODE_RET();
3323 }
3324 IEMOP_HLP_DONE_DECODING();
3325
3326 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3327 if (iCrReg & (2 | 8))
3328 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3329 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3330 else
3331 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3332 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3333}
3334
3335
3336/** Opcode 0x0f 0x23. */
3337FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3338{
3339 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3340 IEMOP_HLP_MIN_386();
3341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3344 IEMOP_RAISE_INVALID_OPCODE_RET();
3345 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3346 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x24. */
3351FNIEMOP_DEF(iemOp_mov_Rd_Td)
3352{
3353 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3358 IEMOP_RAISE_INVALID_OPCODE_RET();
3359 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3360 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3361}
3362
3363
3364/** Opcode 0x0f 0x26. */
3365FNIEMOP_DEF(iemOp_mov_Td_Rd)
3366{
3367 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3368 IEMOP_HLP_MIN_386();
3369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3371 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3372 IEMOP_RAISE_INVALID_OPCODE_RET();
3373 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3374}
3375
3376
3377/**
3378 * @opcode 0x28
3379 * @oppfx none
3380 * @opcpuid sse
3381 * @opgroup og_sse_simdfp_datamove
3382 * @opxcpttype 1
3383 * @optest op1=1 op2=2 -> op1=2
3384 * @optest op1=0 op2=-42 -> op1=-42
3385 */
3386FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3387{
3388 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3390 if (IEM_IS_MODRM_REG_MODE(bRm))
3391 {
3392 /*
3393 * Register, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3399 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3400 IEM_GET_MODRM_RM(pVCpu, bRm));
3401 IEM_MC_ADVANCE_RIP_AND_FINISH();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /*
3407 * Register, memory.
3408 */
3409 IEM_MC_BEGIN(0, 0);
3410 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3412
3413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417
3418 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3419 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3420
3421 IEM_MC_ADVANCE_RIP_AND_FINISH();
3422 IEM_MC_END();
3423 }
3424}
3425
3426/**
3427 * @opcode 0x28
3428 * @oppfx 66
3429 * @opcpuid sse2
3430 * @opgroup og_sse2_pcksclr_datamove
3431 * @opxcpttype 1
3432 * @optest op1=1 op2=2 -> op1=2
3433 * @optest op1=0 op2=-42 -> op1=-42
3434 */
3435FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3436{
3437 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if (IEM_IS_MODRM_REG_MODE(bRm))
3440 {
3441 /*
3442 * Register, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3449 IEM_GET_MODRM_RM(pVCpu, bRm));
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 /*
3456 * Register, memory.
3457 */
3458 IEM_MC_BEGIN(0, 0);
3459 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3466
3467 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3468 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3469
3470 IEM_MC_ADVANCE_RIP_AND_FINISH();
3471 IEM_MC_END();
3472 }
3473}
3474
3475/* Opcode 0xf3 0x0f 0x28 - invalid */
3476/* Opcode 0xf2 0x0f 0x28 - invalid */
3477
3478/**
3479 * @opcode 0x29
3480 * @oppfx none
3481 * @opcpuid sse
3482 * @opgroup og_sse_simdfp_datamove
3483 * @opxcpttype 1
3484 * @optest op1=1 op2=2 -> op1=2
3485 * @optest op1=0 op2=-42 -> op1=-42
3486 */
3487FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3488{
3489 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3491 if (IEM_IS_MODRM_REG_MODE(bRm))
3492 {
3493 /*
3494 * Register, register.
3495 */
3496 IEM_MC_BEGIN(0, 0);
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3498 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3500 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3501 IEM_GET_MODRM_REG(pVCpu, bRm));
3502 IEM_MC_ADVANCE_RIP_AND_FINISH();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /*
3508 * Memory, register.
3509 */
3510 IEM_MC_BEGIN(0, 0);
3511 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3518
3519 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3520 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3521
3522 IEM_MC_ADVANCE_RIP_AND_FINISH();
3523 IEM_MC_END();
3524 }
3525}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
3536FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3537{
3538 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * Register, register.
3544 */
3545 IEM_MC_BEGIN(0, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3549 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3550 IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * Memory, register.
3558 */
3559 IEM_MC_BEGIN(0, 0);
3560 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3562
3563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3567
3568 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3569 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3570
3571 IEM_MC_ADVANCE_RIP_AND_FINISH();
3572 IEM_MC_END();
3573 }
3574}
3575
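/*
 * movaps/movapd (including the store forms above) demand a 16-byte aligned
 * memory operand and raise #GP(0) otherwise, unlike movups/movupd; that is
 * what the _ALIGN_SSE accessors enforce.  A standalone sketch of the check
 * (plain C, not IEM microcode; the helper name is made up):
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static bool isMovapsAddrAligned(uint64_t GCPtrEff)
 *      {
 *          return (GCPtrEff & 15) == 0;    // misaligned -> #GP(0)
 *      }
 */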
3576/* Opcode 0xf3 0x0f 0x29 - invalid */
3577/* Opcode 0xf2 0x0f 0x29 - invalid */
3578
3579
3580/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3581FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3582{
3583 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 if (IEM_IS_MODRM_REG_MODE(bRm))
3586 {
3587 /*
3588 * XMM, MMX
3589 */
3590 IEM_MC_BEGIN(0, 0);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3592 IEM_MC_LOCAL(X86XMMREG, Dst);
3593 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3594 IEM_MC_ARG(uint64_t, u64Src, 1);
3595 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3597 IEM_MC_PREPARE_FPU_USAGE();
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3601 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3602
3603 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3604 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609 else
3610 {
3611 /*
3612 * XMM, [mem64]
3613 */
3614 IEM_MC_BEGIN(0, 0);
3615 IEM_MC_LOCAL(X86XMMREG, Dst);
3616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3617 IEM_MC_ARG(uint64_t, u64Src, 1);
3618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3619
3620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3624
3625 IEM_MC_PREPARE_FPU_USAGE();
3626
3627 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3628 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3629
3630 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3631 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3632
3633        IEM_MC_FPU_TO_MMX_MODE(); /* After all fetches, so a #PF cannot leave FTW half-switched. */
3634        IEM_MC_ADVANCE_RIP_AND_FINISH();
3635 IEM_MC_END();
3636 }
3637}
3638
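/*
 * cvtpi2ps converts the two packed int32 values of the MMX source into two
 * float32 values in the low quadword of the XMM destination; the high
 * quadword is preserved, which is why the code above fetches Dst before the
 * call.  Rounding honours MXCSR.RC.  A standalone sketch of the low-quadword
 * math (plain C, not IEM microcode; round-to-nearest and a little-endian
 * host assumed, the helper name is made up):
 *
 *      #include <stdint.h>
 *      #include <string.h>
 *
 *      static void cvtpi2psSketch(float aDst[4], uint64_t u64Src)
 *      {
 *          int32_t ai32Src[2];
 *          memcpy(ai32Src, &u64Src, sizeof(ai32Src));
 *          aDst[0] = (float)ai32Src[0];    // low dword  -> low single
 *          aDst[1] = (float)ai32Src[1];    // high dword -> second single
 *          // aDst[2] and aDst[3] are deliberately left untouched
 *      }
 */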
3639
3640/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3641FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3642{
3643    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3645 if (IEM_IS_MODRM_REG_MODE(bRm))
3646 {
3647 /*
3648 * XMM, MMX
3649 */
3650 IEM_MC_BEGIN(0, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3652 IEM_MC_LOCAL(X86XMMREG, Dst);
3653 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3654 IEM_MC_ARG(uint64_t, u64Src, 1);
3655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3656 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3657 IEM_MC_PREPARE_FPU_USAGE();
3658 IEM_MC_FPU_TO_MMX_MODE();
3659
3660 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3661
3662 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3663 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3664
3665 IEM_MC_ADVANCE_RIP_AND_FINISH();
3666 IEM_MC_END();
3667 }
3668 else
3669 {
3670 /*
3671 * XMM, [mem64]
3672 */
3673 IEM_MC_BEGIN(0, 0);
3674 IEM_MC_LOCAL(X86XMMREG, Dst);
3675 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3676 IEM_MC_ARG(uint64_t, u64Src, 1);
3677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3678
3679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3681 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3682 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3683 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3684
3685 /* Doesn't cause a transition to MMX mode. */
3686 IEM_MC_PREPARE_SSE_USAGE();
3687
3688 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3689 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3690
3691 IEM_MC_ADVANCE_RIP_AND_FINISH();
3692 IEM_MC_END();
3693 }
3694}
3695
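/*
 * cvtpi2pd widens the two packed int32 values to two float64 values filling
 * the whole destination, so no prior Dst fetch is needed and, since every
 * int32 is exactly representable as a double, no rounding can occur.  Only
 * the register form touches an MMX register and transitions to MMX mode; the
 * memory form above deliberately stays on the SSE side.  Sketch (plain C,
 * not IEM microcode; little-endian host assumed, helper name made up):
 *
 *      #include <stdint.h>
 *      #include <string.h>
 *
 *      static void cvtpi2pdSketch(double aDst[2], uint64_t u64Src)
 *      {
 *          int32_t ai32Src[2];
 *          memcpy(ai32Src, &u64Src, sizeof(ai32Src));
 *          aDst[0] = ai32Src[0];           // exact conversion
 *          aDst[1] = ai32Src[1];
 *      }
 */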
3696
3697/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3698FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3699{
3700 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3701
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3704 {
3705 if (IEM_IS_MODRM_REG_MODE(bRm))
3706 {
3707 /* XMM, greg64 */
3708 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3709 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3710 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3711 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3712
3713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3716
3717 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3718 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3719 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3720
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /* XMM, [mem64] */
3727 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3729 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3730 IEM_MC_LOCAL(int64_t, i64Src);
3731 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3732 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3733
3734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3736 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3737 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3738
3739 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3740 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3741 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3742
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 IEM_MC_END();
3745 }
3746 }
3747 else
3748 {
3749 if (IEM_IS_MODRM_REG_MODE(bRm))
3750 {
3751            /* XMM, greg32 */
3752 IEM_MC_BEGIN(0, 0);
3753 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3754 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3755 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3756
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3759 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3760
3761 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3762 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3763 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3764
3765 IEM_MC_ADVANCE_RIP_AND_FINISH();
3766 IEM_MC_END();
3767 }
3768 else
3769 {
3770            /* XMM, [mem32] */
3771 IEM_MC_BEGIN(0, 0);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3774 IEM_MC_LOCAL(int32_t, i32Src);
3775 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3776 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3777
3778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3781 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3782
3783 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3784 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3785 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3786
3787 IEM_MC_ADVANCE_RIP_AND_FINISH();
3788 IEM_MC_END();
3789 }
3790 }
3791}
3792
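/*
 * For cvtsi2ss (and cvtsi2sd below) REX.W selects the integer source width:
 * 64-bit with REX.W, 32-bit without.  The result replaces only the low
 * scalar of the XMM destination.  int64 -> float32 is inexact above 2^24, so
 * MXCSR.RC matters; the sketch below stands in for that with fesetround()
 * (plain C, not IEM microcode; strictly needs #pragma STDC FENV_ACCESS ON,
 * the helper name is made up):
 *
 *      #include <fenv.h>
 *      #include <stdint.h>
 *
 *      static float cvtsi2ssSketch(int64_t i64Src, int iRoundingMode)
 *      {
 *          fesetround(iRoundingMode);  // e.g. FE_TONEAREST, FE_TOWARDZERO
 *          return (float)i64Src;       // correctly rounded per current mode
 *      }
 */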
3793
3794/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3795FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3796{
3797 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3798
3799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3800 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3801 {
3802 if (IEM_IS_MODRM_REG_MODE(bRm))
3803 {
3804 /* XMM, greg64 */
3805 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3806 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3808 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3809
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3813
3814 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3815 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3816 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3817
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 /* XMM, [mem64] */
3824 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3826 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3827 IEM_MC_LOCAL(int64_t, i64Src);
3828 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3829 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3830
3831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3834 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3835
3836 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3837 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3838 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3839
3840 IEM_MC_ADVANCE_RIP_AND_FINISH();
3841 IEM_MC_END();
3842 }
3843 }
3844 else
3845 {
3846 if (IEM_IS_MODRM_REG_MODE(bRm))
3847 {
3848 /* XMM, greg32 */
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3851 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3852 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3853
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3855 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3856 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3857
3858 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3859 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3860 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3861
3862 IEM_MC_ADVANCE_RIP_AND_FINISH();
3863 IEM_MC_END();
3864 }
3865 else
3866 {
3867 /* XMM, [mem32] */
3868 IEM_MC_BEGIN(0, 0);
3869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3870 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3871 IEM_MC_LOCAL(int32_t, i32Src);
3872 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3873 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3874
3875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3877 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3878 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3879
3880 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3881 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3882 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3883
3884 IEM_MC_ADVANCE_RIP_AND_FINISH();
3885 IEM_MC_END();
3886 }
3887 }
3888}
3889
3890
3891/**
3892 * @opcode 0x2b
3893 * @opcodesub !11 mr/reg
3894 * @oppfx none
3895 * @opcpuid sse
3896 * @opgroup og_sse1_cachect
3897 * @opxcpttype 1
3898 * @optest op1=1 op2=2 -> op1=2
3899 * @optest op1=0 op2=-42 -> op1=-42
3900 */
3901FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3902{
3903 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3905 if (IEM_IS_MODRM_MEM_MODE(bRm))
3906 {
3907 /*
3908 * memory, register.
3909 */
3910 IEM_MC_BEGIN(0, 0);
3911 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3913
3914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3916 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3917        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3918
3919 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3920 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3921
3922 IEM_MC_ADVANCE_RIP_AND_FINISH();
3923 IEM_MC_END();
3924 }
3925 /* The register, register encoding is invalid. */
3926 else
3927 IEMOP_RAISE_INVALID_OPCODE_RET();
3928}
3929
3930/**
3931 * @opcode 0x2b
3932 * @opcodesub !11 mr/reg
3933 * @oppfx 0x66
3934 * @opcpuid sse2
3935 * @opgroup og_sse2_cachect
3936 * @opxcpttype 1
3937 * @optest op1=1 op2=2 -> op1=2
3938 * @optest op1=0 op2=-42 -> op1=-42
3939 */
3940FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3941{
3942 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3944 if (IEM_IS_MODRM_MEM_MODE(bRm))
3945 {
3946 /*
3947 * memory, register.
3948 */
3949 IEM_MC_BEGIN(0, 0);
3950 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3952
3953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3955 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3956        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3957
3958 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3959 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3960
3961 IEM_MC_ADVANCE_RIP_AND_FINISH();
3962 IEM_MC_END();
3963 }
3964 /* The register, register encoding is invalid. */
3965 else
3966 IEMOP_RAISE_INVALID_OPCODE_RET();
3967}
3968/* Opcode 0xf3 0x0f 0x2b - invalid */
3969/* Opcode 0xf2 0x0f 0x2b - invalid */
3970
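/*
 * The movntps/movntpd forms above are non-temporal stores: architecturally
 * they behave like ordinary aligned 16-byte stores (hence the plain
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE), the cache-bypass hint having no
 * observable effect under emulation.  Only the memory encoding exists; a
 * sketch of the ModR/M validity rule applied above (plain C, helper name
 * made up):
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static bool isMovntEncodingValid(uint8_t bRm)
 *      {
 *          return (bRm >> 6) != 3;     // mod == 11b (register form) is #UD
 *      }
 */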
3971
3972/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3973FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3974{
3975 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3977 if (IEM_IS_MODRM_REG_MODE(bRm))
3978 {
3979 /*
3980 * Register, register.
3981 */
3982 IEM_MC_BEGIN(0, 0);
3983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3984 IEM_MC_LOCAL(uint64_t, u64Dst);
3985 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3986 IEM_MC_ARG(uint64_t, u64Src, 1);
3987 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3988 IEM_MC_PREPARE_FPU_USAGE();
3989 IEM_MC_FPU_TO_MMX_MODE();
3990
3991 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3992
3993 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3994 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3995
3996 IEM_MC_ADVANCE_RIP_AND_FINISH();
3997 IEM_MC_END();
3998 }
3999 else
4000 {
4001 /*
4002 * Register, memory.
4003 */
4004 IEM_MC_BEGIN(0, 0);
4005 IEM_MC_LOCAL(uint64_t, u64Dst);
4006 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4007 IEM_MC_ARG(uint64_t, u64Src, 1);
4008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4009
4010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4013 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4014
4015 IEM_MC_PREPARE_FPU_USAGE();
4016 IEM_MC_FPU_TO_MMX_MODE();
4017
4018 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
4019 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4020
4021 IEM_MC_ADVANCE_RIP_AND_FINISH();
4022 IEM_MC_END();
4023 }
4024}
4025
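/*
 * The extra 't' in cvttps2pi means truncation: the conversion always rounds
 * toward zero, ignoring MXCSR.RC, whereas cvtps2pi (0x0f 0x2d below) honours
 * MXCSR.RC.  A sketch of the truncating lane conversion, ignoring the
 * out-of-range case (see the note after cvttsd2si below; plain C, helper
 * name made up):
 *
 *      #include <stdint.h>
 *
 *      static void cvttps2piSketch(int32_t aiDst[2], const float aSrc[2])
 *      {
 *          aiDst[0] = (int32_t)aSrc[0];    // C casts truncate toward zero
 *          aiDst[1] = (int32_t)aSrc[1];
 *      }
 */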
4026
4027/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4028FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4029{
4030 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4032 if (IEM_IS_MODRM_REG_MODE(bRm))
4033 {
4034 /*
4035 * Register, register.
4036 */
4037 IEM_MC_BEGIN(0, 0);
4038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4039 IEM_MC_LOCAL(uint64_t, u64Dst);
4040 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4041 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4042 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4043 IEM_MC_PREPARE_FPU_USAGE();
4044 IEM_MC_FPU_TO_MMX_MODE();
4045
4046 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4047
4048 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4049 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4050
4051 IEM_MC_ADVANCE_RIP_AND_FINISH();
4052 IEM_MC_END();
4053 }
4054 else
4055 {
4056 /*
4057 * Register, memory.
4058 */
4059 IEM_MC_BEGIN(0, 0);
4060 IEM_MC_LOCAL(uint64_t, u64Dst);
4061 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4062 IEM_MC_LOCAL(X86XMMREG, uSrc);
4063 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4065
4066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4069 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4070
4071 IEM_MC_PREPARE_FPU_USAGE();
4072 IEM_MC_FPU_TO_MMX_MODE();
4073
4074 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4075 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4076
4077 IEM_MC_ADVANCE_RIP_AND_FINISH();
4078 IEM_MC_END();
4079 }
4080}
4081
4082
4083/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4084FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4085{
4086    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4087
4088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4089 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4090 {
4091 if (IEM_IS_MODRM_REG_MODE(bRm))
4092 {
4093 /* greg64, XMM */
4094 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4095 IEM_MC_LOCAL(int64_t, i64Dst);
4096 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4097 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4098
4099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4100 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4101 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4102
4103 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4104 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4105 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4106
4107 IEM_MC_ADVANCE_RIP_AND_FINISH();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112            /* greg64, [mem32] */
4113 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4115 IEM_MC_LOCAL(int64_t, i64Dst);
4116 IEM_MC_LOCAL(uint32_t, u32Src);
4117 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4118 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4119
4120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4123 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4124
4125 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4126 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4127 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4128
4129 IEM_MC_ADVANCE_RIP_AND_FINISH();
4130 IEM_MC_END();
4131 }
4132 }
4133 else
4134 {
4135 if (IEM_IS_MODRM_REG_MODE(bRm))
4136 {
4137            /* greg32, XMM */
4138 IEM_MC_BEGIN(0, 0);
4139 IEM_MC_LOCAL(int32_t, i32Dst);
4140 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4141 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4142
4143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4144 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4145 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4146
4147 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4148 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4149 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4150
4151 IEM_MC_ADVANCE_RIP_AND_FINISH();
4152 IEM_MC_END();
4153 }
4154 else
4155 {
4156            /* greg32, [mem32] */
4157 IEM_MC_BEGIN(0, 0);
4158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4159 IEM_MC_LOCAL(int32_t, i32Dst);
4160 IEM_MC_LOCAL(uint32_t, u32Src);
4161 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4162 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4163
4164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4166 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4167 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4168
4169 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4170 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4171 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4172
4173 IEM_MC_ADVANCE_RIP_AND_FINISH();
4174 IEM_MC_END();
4175 }
4176 }
4177}
4178
4179
4180/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4181FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4182{
4183 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4184
4185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4186 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4187 {
4188 if (IEM_IS_MODRM_REG_MODE(bRm))
4189 {
4190 /* greg64, XMM */
4191 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4192 IEM_MC_LOCAL(int64_t, i64Dst);
4193 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4194 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4195
4196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4197 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4198 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4199
4200 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4201 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4202 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4203
4204 IEM_MC_ADVANCE_RIP_AND_FINISH();
4205 IEM_MC_END();
4206 }
4207 else
4208 {
4209 /* greg64, [mem64] */
4210 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4212 IEM_MC_LOCAL(int64_t, i64Dst);
4213 IEM_MC_LOCAL(uint64_t, u64Src);
4214 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4215 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4216
4217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4219 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4220 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4221
4222 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4223 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4224 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4225
4226 IEM_MC_ADVANCE_RIP_AND_FINISH();
4227 IEM_MC_END();
4228 }
4229 }
4230 else
4231 {
4232 if (IEM_IS_MODRM_REG_MODE(bRm))
4233 {
4234            /* greg32, XMM */
4235 IEM_MC_BEGIN(0, 0);
4236 IEM_MC_LOCAL(int32_t, i32Dst);
4237 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4238 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4239
4240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4242 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4243
4244 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4245 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4246 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4247
4248 IEM_MC_ADVANCE_RIP_AND_FINISH();
4249 IEM_MC_END();
4250 }
4251 else
4252 {
4253            /* greg32, [mem64] */
4254 IEM_MC_BEGIN(0, 0);
4255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4256 IEM_MC_LOCAL(int32_t, i32Dst);
4257 IEM_MC_LOCAL(uint64_t, u64Src);
4258 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4259 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4260
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4264 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4265
4266 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4267 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4268 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4269
4270 IEM_MC_ADVANCE_RIP_AND_FINISH();
4271 IEM_MC_END();
4272 }
4273 }
4274}
4275
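/*
 * When the truncated value does not fit the destination (or the source is a
 * NaN), cvttss2si/cvttsd2si return the integer indefinite value -- 0x80000000
 * resp. 0x8000000000000000 -- and raise #I; the non-truncating cvtss2si and
 * cvtsd2si forms (0x0f 0x2d below) behave the same except that they round
 * per MXCSR.RC.  A 32-bit sketch (plain C, not IEM microcode; helper name
 * made up):
 *
 *      #include <math.h>
 *      #include <stdint.h>
 *
 *      static int32_t cvttss2siSketch(float r32Src)
 *      {
 *          if (isnan(r32Src))
 *              return INT32_MIN;           // integer indefinite; #I raised
 *          if (r32Src >= 2147483648.0f || r32Src < -2147483648.0f) // +/- 2^31
 *              return INT32_MIN;           // ditto
 *          return (int32_t)r32Src;         // truncates toward zero
 *      }
 */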
4276
4277/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4278FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4279{
4280 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4282 if (IEM_IS_MODRM_REG_MODE(bRm))
4283 {
4284 /*
4285 * Register, register.
4286 */
4287 IEM_MC_BEGIN(0, 0);
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4289 IEM_MC_LOCAL(uint64_t, u64Dst);
4290 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4291 IEM_MC_ARG(uint64_t, u64Src, 1);
4292
4293 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4294 IEM_MC_PREPARE_FPU_USAGE();
4295 IEM_MC_FPU_TO_MMX_MODE();
4296
4297 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4298
4299 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4300 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4301
4302 IEM_MC_ADVANCE_RIP_AND_FINISH();
4303 IEM_MC_END();
4304 }
4305 else
4306 {
4307 /*
4308 * Register, memory.
4309 */
4310 IEM_MC_BEGIN(0, 0);
4311 IEM_MC_LOCAL(uint64_t, u64Dst);
4312 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4313 IEM_MC_ARG(uint64_t, u64Src, 1);
4314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4315
4316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4320
4321 IEM_MC_PREPARE_FPU_USAGE();
4322 IEM_MC_FPU_TO_MMX_MODE();
4323
4324 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4325 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4326
4327 IEM_MC_ADVANCE_RIP_AND_FINISH();
4328 IEM_MC_END();
4329 }
4330}
4331
4332
4333/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4334FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4335{
4336 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4338 if (IEM_IS_MODRM_REG_MODE(bRm))
4339 {
4340 /*
4341 * Register, register.
4342 */
4343 IEM_MC_BEGIN(0, 0);
4344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4345 IEM_MC_LOCAL(uint64_t, u64Dst);
4346 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4347 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4348
4349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4350 IEM_MC_PREPARE_FPU_USAGE();
4351 IEM_MC_FPU_TO_MMX_MODE();
4352
4353 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4354
4355 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4356 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4357
4358 IEM_MC_ADVANCE_RIP_AND_FINISH();
4359 IEM_MC_END();
4360 }
4361 else
4362 {
4363 /*
4364 * Register, memory.
4365 */
4366 IEM_MC_BEGIN(0, 0);
4367 IEM_MC_LOCAL(uint64_t, u64Dst);
4368 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4369 IEM_MC_LOCAL(X86XMMREG, uSrc);
4370 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4372
4373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4375 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4376 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4377
4378 IEM_MC_PREPARE_FPU_USAGE();
4379 IEM_MC_FPU_TO_MMX_MODE();
4380
4381 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4382 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4383
4384 IEM_MC_ADVANCE_RIP_AND_FINISH();
4385 IEM_MC_END();
4386 }
4387}
4388
4389
4390/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4391FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4392{
4393    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4394
4395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4396 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4397 {
4398 if (IEM_IS_MODRM_REG_MODE(bRm))
4399 {
4400 /* greg64, XMM */
4401 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4402 IEM_MC_LOCAL(int64_t, i64Dst);
4403 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4404 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4405
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4408 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4409
4410 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4411 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4412 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4413
4414 IEM_MC_ADVANCE_RIP_AND_FINISH();
4415 IEM_MC_END();
4416 }
4417 else
4418 {
4419            /* greg64, [mem32] */
4420 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4422 IEM_MC_LOCAL(int64_t, i64Dst);
4423 IEM_MC_LOCAL(uint32_t, u32Src);
4424 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4425 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4426
4427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4429 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4430 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4431
4432 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4433 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4434 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4435
4436 IEM_MC_ADVANCE_RIP_AND_FINISH();
4437 IEM_MC_END();
4438 }
4439 }
4440 else
4441 {
4442 if (IEM_IS_MODRM_REG_MODE(bRm))
4443 {
4444            /* greg32, XMM */
4445 IEM_MC_BEGIN(0, 0);
4446 IEM_MC_LOCAL(int32_t, i32Dst);
4447 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4448 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4449
4450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4452 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4453
4454 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4455 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4456 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4457
4458 IEM_MC_ADVANCE_RIP_AND_FINISH();
4459 IEM_MC_END();
4460 }
4461 else
4462 {
4463            /* greg32, [mem32] */
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4466 IEM_MC_LOCAL(int32_t, i32Dst);
4467 IEM_MC_LOCAL(uint32_t, u32Src);
4468 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4469 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4470
4471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4473 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4474 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4475
4476 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4477 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4478 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4479
4480 IEM_MC_ADVANCE_RIP_AND_FINISH();
4481 IEM_MC_END();
4482 }
4483 }
4484}
4485
4486
4487/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4488FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4489{
4490 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4491
4492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4493 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4494 {
4495 if (IEM_IS_MODRM_REG_MODE(bRm))
4496 {
4497 /* greg64, XMM */
4498 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4499 IEM_MC_LOCAL(int64_t, i64Dst);
4500 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4501 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4502
4503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4504 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4505 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4506
4507 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4508 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4509 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4510
4511 IEM_MC_ADVANCE_RIP_AND_FINISH();
4512 IEM_MC_END();
4513 }
4514 else
4515 {
4516 /* greg64, [mem64] */
4517 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4519 IEM_MC_LOCAL(int64_t, i64Dst);
4520 IEM_MC_LOCAL(uint64_t, u64Src);
4521 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4522 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4523
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4526 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4527            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4528
4529 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4530 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4531 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4532
4533 IEM_MC_ADVANCE_RIP_AND_FINISH();
4534 IEM_MC_END();
4535 }
4536 }
4537 else
4538 {
4539 if (IEM_IS_MODRM_REG_MODE(bRm))
4540 {
4541 /* greg32, XMM */
4542 IEM_MC_BEGIN(0, 0);
4543 IEM_MC_LOCAL(int32_t, i32Dst);
4544 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4545 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4546
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4548 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4549 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4550
4551 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4552 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4553 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4554
4555 IEM_MC_ADVANCE_RIP_AND_FINISH();
4556 IEM_MC_END();
4557 }
4558 else
4559 {
4560 /* greg32, [mem64] */
4561 IEM_MC_BEGIN(0, 0);
4562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4563 IEM_MC_LOCAL(int32_t, i32Dst);
4564 IEM_MC_LOCAL(uint64_t, u64Src);
4565 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4566 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4567
4568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4570 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4571 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4572
4573 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4574 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4575 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4576
4577 IEM_MC_ADVANCE_RIP_AND_FINISH();
4578 IEM_MC_END();
4579 }
4580 }
4581}
4582
4583
4584/**
4585 * @opcode 0x2e
4586 * @oppfx none
4587 * @opflmodify cf,pf,af,zf,sf,of
4588 * @opflclear af,sf,of
4589 */
4590FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4591{
4592 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4594 if (IEM_IS_MODRM_REG_MODE(bRm))
4595 {
4596 /*
4597 * Register, register.
4598 */
4599 IEM_MC_BEGIN(0, 0);
4600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4601 IEM_MC_LOCAL(uint32_t, fEFlags);
4602 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4603 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4604 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4605 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4606 IEM_MC_PREPARE_SSE_USAGE();
4607 IEM_MC_FETCH_EFLAGS(fEFlags);
4608 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4609 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4610 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4611 IEM_MC_COMMIT_EFLAGS(fEFlags);
4612
4613 IEM_MC_ADVANCE_RIP_AND_FINISH();
4614 IEM_MC_END();
4615 }
4616 else
4617 {
4618 /*
4619 * Register, memory.
4620 */
4621 IEM_MC_BEGIN(0, 0);
4622 IEM_MC_LOCAL(uint32_t, fEFlags);
4623 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4624 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4625 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4627
4628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4630 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4631 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4632
4633 IEM_MC_PREPARE_SSE_USAGE();
4634 IEM_MC_FETCH_EFLAGS(fEFlags);
4635 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4636 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4637 IEM_MC_COMMIT_EFLAGS(fEFlags);
4638
4639 IEM_MC_ADVANCE_RIP_AND_FINISH();
4640 IEM_MC_END();
4641 }
4642}
4643
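/*
 * ucomiss compares the low singles and maps the outcome onto ZF/PF/CF while
 * clearing AF/SF/OF: unordered -> 1/1/1, less -> 0/0/1, greater -> 0/0/0,
 * equal -> 1/0/0.  comiss (0x0f 0x2f below) computes the same flags but also
 * raises #IA on quiet NaNs, not just signalling ones.  A sketch of the flag
 * mapping (plain C, not IEM microcode; mask values per the x86 EFLAGS
 * layout, helper name made up):
 *
 *      #include <math.h>
 *      #include <stdint.h>
 *
 *      #define SKETCH_EFL_CF UINT32_C(0x0001)
 *      #define SKETCH_EFL_PF UINT32_C(0x0004)
 *      #define SKETCH_EFL_ZF UINT32_C(0x0040)
 *
 *      static uint32_t ucomissFlagsSketch(float r32Src1, float r32Src2)
 *      {
 *          if (isunordered(r32Src1, r32Src2))
 *              return SKETCH_EFL_ZF | SKETCH_EFL_PF | SKETCH_EFL_CF;
 *          if (r32Src1 < r32Src2)
 *              return SKETCH_EFL_CF;
 *          return r32Src1 == r32Src2 ? SKETCH_EFL_ZF : 0;
 *      }
 */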
4644
4645/**
4646 * @opcode 0x2e
4647 * @oppfx 0x66
4648 * @opflmodify cf,pf,af,zf,sf,of
4649 * @opflclear af,sf,of
4650 */
4651FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4652{
4653 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4655 if (IEM_IS_MODRM_REG_MODE(bRm))
4656 {
4657 /*
4658 * Register, register.
4659 */
4660 IEM_MC_BEGIN(0, 0);
4661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4662 IEM_MC_LOCAL(uint32_t, fEFlags);
4663 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4664 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4665 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4666 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4667 IEM_MC_PREPARE_SSE_USAGE();
4668 IEM_MC_FETCH_EFLAGS(fEFlags);
4669 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4670 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4671 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4672 IEM_MC_COMMIT_EFLAGS(fEFlags);
4673
4674 IEM_MC_ADVANCE_RIP_AND_FINISH();
4675 IEM_MC_END();
4676 }
4677 else
4678 {
4679 /*
4680 * Register, memory.
4681 */
4682 IEM_MC_BEGIN(0, 0);
4683 IEM_MC_LOCAL(uint32_t, fEFlags);
4684 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4685 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4686 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4688
4689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4691 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4692 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4693
4694 IEM_MC_PREPARE_SSE_USAGE();
4695 IEM_MC_FETCH_EFLAGS(fEFlags);
4696 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4697 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4698 IEM_MC_COMMIT_EFLAGS(fEFlags);
4699
4700 IEM_MC_ADVANCE_RIP_AND_FINISH();
4701 IEM_MC_END();
4702 }
4703}
4704
4705
4706/* Opcode 0xf3 0x0f 0x2e - invalid */
4707/* Opcode 0xf2 0x0f 0x2e - invalid */
4708
4709
4710/**
4711 * @opcode 0x2f
4712 * @oppfx none
4713 * @opflmodify cf,pf,af,zf,sf,of
4714 * @opflclear af,sf,of
4715 */
4716FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4717{
4718 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4720 if (IEM_IS_MODRM_REG_MODE(bRm))
4721 {
4722 /*
4723 * Register, register.
4724 */
4725 IEM_MC_BEGIN(0, 0);
4726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4727 IEM_MC_LOCAL(uint32_t, fEFlags);
4728 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4729 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4730 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4731 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4732 IEM_MC_PREPARE_SSE_USAGE();
4733 IEM_MC_FETCH_EFLAGS(fEFlags);
4734 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4735 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4736 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4737 IEM_MC_COMMIT_EFLAGS(fEFlags);
4738
4739 IEM_MC_ADVANCE_RIP_AND_FINISH();
4740 IEM_MC_END();
4741 }
4742 else
4743 {
4744 /*
4745 * Register, memory.
4746 */
4747 IEM_MC_BEGIN(0, 0);
4748 IEM_MC_LOCAL(uint32_t, fEFlags);
4749 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4750 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4751 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4753
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4757 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4758
4759 IEM_MC_PREPARE_SSE_USAGE();
4760 IEM_MC_FETCH_EFLAGS(fEFlags);
4761 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4762 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4763 IEM_MC_COMMIT_EFLAGS(fEFlags);
4764
4765 IEM_MC_ADVANCE_RIP_AND_FINISH();
4766 IEM_MC_END();
4767 }
4768}
4769
4770
4771/**
4772 * @opcode 0x2f
4773 * @oppfx 0x66
4774 * @opflmodify cf,pf,af,zf,sf,of
4775 * @opflclear af,sf,of
4776 */
4777FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4778{
4779 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4781 if (IEM_IS_MODRM_REG_MODE(bRm))
4782 {
4783 /*
4784 * Register, register.
4785 */
4786 IEM_MC_BEGIN(0, 0);
4787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4788 IEM_MC_LOCAL(uint32_t, fEFlags);
4789 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4790 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4791 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4793 IEM_MC_PREPARE_SSE_USAGE();
4794 IEM_MC_FETCH_EFLAGS(fEFlags);
4795 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4796 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4797 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4798 IEM_MC_COMMIT_EFLAGS(fEFlags);
4799
4800 IEM_MC_ADVANCE_RIP_AND_FINISH();
4801 IEM_MC_END();
4802 }
4803 else
4804 {
4805 /*
4806 * Register, memory.
4807 */
4808 IEM_MC_BEGIN(0, 0);
4809 IEM_MC_LOCAL(uint32_t, fEFlags);
4810 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4811 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4812 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4814
4815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4817 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4818 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4819
4820 IEM_MC_PREPARE_SSE_USAGE();
4821 IEM_MC_FETCH_EFLAGS(fEFlags);
4822 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4823 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4824 IEM_MC_COMMIT_EFLAGS(fEFlags);
4825
4826 IEM_MC_ADVANCE_RIP_AND_FINISH();
4827 IEM_MC_END();
4828 }
4829}
4830
4831
4832/* Opcode 0xf3 0x0f 0x2f - invalid */
4833/* Opcode 0xf2 0x0f 0x2f - invalid */
4834
4835/** Opcode 0x0f 0x30. */
4836FNIEMOP_DEF(iemOp_wrmsr)
4837{
4838 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4841}
4842
4843
4844/** Opcode 0x0f 0x31. */
4845FNIEMOP_DEF(iemOp_rdtsc)
4846{
4847 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4850 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4852 iemCImpl_rdtsc);
4853}
4854
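/*
 * rdtsc (like rdmsr and rdpmc below) returns its 64-bit result split across
 * EDX:EAX, which is why both GPRs appear in the liveness mask above; in
 * 64-bit mode the high halves of RAX/RDX are zeroed.  Sketch of the split
 * (plain C, helper name made up):
 *
 *      #include <stdint.h>
 *
 *      static void rdtscSplitSketch(uint64_t uTsc, uint32_t *puEax, uint32_t *puEdx)
 *      {
 *          *puEax = (uint32_t)uTsc;            // low  32 bits -> EAX
 *          *puEdx = (uint32_t)(uTsc >> 32);    // high 32 bits -> EDX
 *      }
 */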
4855
4856 /** Opcode 0x0f 0x32. */
4857FNIEMOP_DEF(iemOp_rdmsr)
4858{
4859 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4861 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4862 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4863 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4864 iemCImpl_rdmsr);
4865}
4866
4867
4868 /** Opcode 0x0f 0x33. */
4869FNIEMOP_DEF(iemOp_rdpmc)
4870{
4871 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4873 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4874 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4875 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4876 iemCImpl_rdpmc);
4877}
4878
4879
4880/** Opcode 0x0f 0x34. */
4881FNIEMOP_DEF(iemOp_sysenter)
4882{
4883 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4886 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4887 iemCImpl_sysenter);
4888}
4889
4890/** Opcode 0x0f 0x35. */
4891FNIEMOP_DEF(iemOp_sysexit)
4892{
4893 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4896 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4897 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4898}
4899
4900/** Opcode 0x0f 0x37. */
4901FNIEMOP_STUB(iemOp_getsec);
4902
4903
4904/** Opcode 0x0f 0x38. */
4905FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4906{
4907#ifdef IEM_WITH_THREE_0F_38
4908 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4909 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4910#else
4911 IEMOP_BITCH_ABOUT_STUB();
4912 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4913#endif
4914}
4915
4916
4917/** Opcode 0x0f 0x3a. */
4918FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4919{
4920#ifdef IEM_WITH_THREE_0F_3A
4921 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4922 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4923#else
4924 IEMOP_BITCH_ABOUT_STUB();
4925 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4926#endif
4927}
4928
4929
4930/**
4931 * Implements a conditional move.
4932 *
4933 * Wish there were an obvious way to do this that would let us share code and
4934 * reduce bloat.
4935 *
4936 * @param a_Cnd The conditional "microcode" operation.
4937 */
4938#define CMOV_X(a_Cnd) \
4939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4940 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4941 { \
4942 switch (pVCpu->iem.s.enmEffOpSize) \
4943 { \
4944 case IEMMODE_16BIT: \
4945 IEM_MC_BEGIN(0, 0); \
4946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4947 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4948 a_Cnd { \
4949 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4950 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4951 } IEM_MC_ENDIF(); \
4952 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4953 IEM_MC_END(); \
4954 break; \
4955 \
4956 case IEMMODE_32BIT: \
4957 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4959 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4960 a_Cnd { \
4961 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4962 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4963 } IEM_MC_ELSE() { \
4964 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4965 } IEM_MC_ENDIF(); \
4966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4967 IEM_MC_END(); \
4968 break; \
4969 \
4970 case IEMMODE_64BIT: \
4971 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4973 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4974 a_Cnd { \
4975 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4976 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4977 } IEM_MC_ENDIF(); \
4978 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4979 IEM_MC_END(); \
4980 break; \
4981 \
4982 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4983 } \
4984 } \
4985 else \
4986 { \
4987 switch (pVCpu->iem.s.enmEffOpSize) \
4988 { \
4989 case IEMMODE_16BIT: \
4990 IEM_MC_BEGIN(0, 0); \
4991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4992 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4995 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4996 a_Cnd { \
4997 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4998 } IEM_MC_ENDIF(); \
4999 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5000 IEM_MC_END(); \
5001 break; \
5002 \
5003 case IEMMODE_32BIT: \
5004 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5006 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5009 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5010 a_Cnd { \
5011 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5012 } IEM_MC_ELSE() { \
5013 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5014 } IEM_MC_ENDIF(); \
5015 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5016 IEM_MC_END(); \
5017 break; \
5018 \
5019 case IEMMODE_64BIT: \
5020 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5022 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5025 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5026 a_Cnd { \
5027 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5028 } IEM_MC_ENDIF(); \
5029 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5030 IEM_MC_END(); \
5031 break; \
5032 \
5033 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5034 } \
5035 } do {} while (0)
5036
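/*
 * Two subtleties of CMOVcc that the macro above encodes: a 32-bit cmov
 * zero-extends into the full 64-bit register even when the condition is
 * false (the IEM_MC_CLEAR_HIGH_GREG_U64 in the IEM_MC_ELSE branches), and a
 * memory source is always fetched, so a bad address faults regardless of the
 * condition.  A behavioural sketch of the 32-bit register case (plain C, not
 * IEM microcode; helper name made up):
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static uint64_t cmov32Sketch(uint64_t uDstIn, uint32_t uSrc, bool fCond)
 *      {
 *          if (fCond)
 *              return uSrc;            // move, implicitly zero-extended
 *          return (uint32_t)uDstIn;    // no move, high half still cleared
 *      }
 */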
5037
5038
5039/**
5040 * @opcode 0x40
5041 * @opfltest of
5042 */
5043FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5044{
5045 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5046 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5047}
5048
5049
5050/**
5051 * @opcode 0x41
5052 * @opfltest of
5053 */
5054FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5055{
5056 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5057 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5058}
5059
5060
5061/**
5062 * @opcode 0x42
5063 * @opfltest cf
5064 */
5065FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5066{
5067 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5068 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5069}
5070
5071
5072/**
5073 * @opcode 0x43
5074 * @opfltest cf
5075 */
5076FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5077{
5078 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5079 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5080}
5081
5082
5083/**
5084 * @opcode 0x44
5085 * @opfltest zf
5086 */
5087FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5088{
5089 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5090 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5091}
5092
5093
5094/**
5095 * @opcode 0x45
5096 * @opfltest zf
5097 */
5098FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5099{
5100 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5101 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5102}
5103
5104
5105/**
5106 * @opcode 0x46
5107 * @opfltest cf,zf
5108 */
5109FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5110{
5111 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5112 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5113}
5114
5115
5116/**
5117 * @opcode 0x47
5118 * @opfltest cf,zf
5119 */
5120FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5121{
5122 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5123 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5124}
5125
5126
5127/**
5128 * @opcode 0x48
5129 * @opfltest sf
5130 */
5131FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5132{
5133 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5134 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5135}
5136
5137
5138/**
5139 * @opcode 0x49
5140 * @opfltest sf
5141 */
5142FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5143{
5144 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5145 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5146}
5147
5148
5149/**
5150 * @opcode 0x4a
5151 * @opfltest pf
5152 */
5153FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5154{
5155 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5156 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5157}
5158
5159
5160/**
5161 * @opcode 0x4b
5162 * @opfltest pf
5163 */
5164FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5165{
5166 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5167 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5168}
5169
5170
5171/**
5172 * @opcode 0x4c
5173 * @opfltest sf,of
5174 */
5175FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5176{
5177 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5178 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5179}
5180
5181
5182/**
5183 * @opcode 0x4d
5184 * @opfltest sf,of
5185 */
5186FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5187{
5188 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5189 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5190}
5191
5192
5193/**
5194 * @opcode 0x4e
5195 * @opfltest zf,sf,of
5196 */
5197FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5198{
5199 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5200 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5201}
5202
5203
5204/**
5205 * @opcode 0x4f
5206 * @opfltest zf,sf,of
5207 */
5208FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5209{
5210 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5211 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5212}
5213
5214#undef CMOV_X

/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
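
/* Illustration only: movmskps packs the sign bit of each of the four packed
   singles into bits 0..3 of the destination GPR and zeroes the rest.  A
   hypothetical reference, assuming the usual little-endian lane numbering: */
#if 0 /* reference sketch */
static uint8_t movmskpsRef(PCRTUINT128U puSrc)
{
    uint8_t bRet = 0;
    for (unsigned iLane = 0; iLane < 4; iLane++)
        bRet |= (uint8_t)((puSrc->au32[iLane] >> 31) << iLane); /* sign bit of lane i -> bit i */
    return bRet;
}
#endif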


/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */


/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}


/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x52 - invalid */


/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
}


/* Opcode 0x66 0x0f 0x53 - invalid */


/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x53 - invalid */


/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(addpd, iemAImpl_addpd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}


/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM[63:0].
         */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
        IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_LOCAL(uint64_t, u64Src);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
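
/* Illustration only: the worker widens the two low packed singles of the
   source to two doubles, which is why only a qword is fetched above.  A
   hypothetical sketch that ignores MXCSR, denormal and NaN details: */
#if 0 /* reference sketch */
static void cvtps2pdRef(double adDst[2], uint64_t u64Src)
{
    float af[2];
    memcpy(af, &u64Src, sizeof(af)); /* two packed singles in the low qword */
    adDst[0] = (double)af[0];        /* float -> double widening is exact   */
    adDst[1] = (double)af[1];
}
#endif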


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}


/* Opcode 0xf2 0x0f 0x5b - invalid */


/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(subps, iemAImpl_subps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}


/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}


/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}


/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}


/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}


/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */


/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}


/* Opcode 0xf3 0x0f 0x66 - invalid */


/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x67 - invalid */

/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}


/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}


/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}


/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
}


/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}


/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}


/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}


/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}


/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */


/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */


/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6d - invalid */


FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

/* Opcode 0xf3 0x0f 0x6e - invalid */


/**
 * @opcode      0x6f
 * @oppfx       none
 * @opcpuid     mmx
 * @opgroup     og_mmx_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);

        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
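
/* Note the only difference from movdqa above: movdqu fetches with
   IEM_MC_FETCH_MEM_U128_NO_AC (no 16-byte alignment requirement), whereas
   movdqa uses IEM_MC_FETCH_MEM_U128_ALIGN_SSE and thus faults on a
   misaligned memory operand.  The register-register forms are identical. */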


/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pshufd      xmm1, xmm2/mem128, imm8
 *      pshufhw     xmm1, xmm2/mem128, imm8
 *      pshuflw     xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
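
/* Illustration only: for pshufd each 2-bit field of the immediate selects
   the source dword for the corresponding destination dword (pshuflw and
   pshufhw apply the same idea to the low/high four words).  A hypothetical
   reference for the pfnWorker semantics: */
#if 0 /* reference sketch */
static void pshufdRef(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uTmp = *puSrc; /* copy first: dst and src may alias */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uTmp.au32[(bImm >> (i * 2)) & 3];
}
#endif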


/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}


/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}


/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}


/**
 * Common worker for MMX instructions of the form:
 *      psrlw       mm, imm8
 *      psraw       mm, imm8
 *      psllw       mm, imm8
 *      psrld       mm, imm8
 *      psrad       mm, imm8
 *      pslld       mm, imm8
 *      psrlq       mm, imm8
 *      psllq       mm, imm8
 *
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
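
/* Illustration only: per-element semantics of the iemAImpl_*_imm_u64 workers
   passed in above, using psrlw as the example; logical shift counts of 16 or
   more clear each word, whereas the arithmetic variants (psraw/psrad)
   saturate towards the sign bit instead.  Hypothetical helper: */
#if 0 /* reference sketch */
static void psrlwImmRef(uint64_t *puDst, uint8_t bShift)
{
    RTUINT64U uVal;
    uVal.u = *puDst;
    for (unsigned i = 0; i < 4; i++)
        uVal.au16[i] = bShift < 16 ? (uint16_t)(uVal.au16[i] >> bShift) : 0;
    *puDst = uVal.u;
}
#endif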


#if 0 /*unused*/
/**
 * Common worker for SSE2 instructions of the form:
 *      psrlw       xmm, imm8
 *      psraw       xmm, imm8
 *      psllw       xmm, imm8
 *      psrld       xmm, imm8
 *      psrad       xmm, imm8
 *      pslld       xmm, imm8
 *      psrlq       xmm, imm8
 *      psllq       xmm, imm8
 *
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
#endif


/**
 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
 */
#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
    { \
        /* \
         * Register, immediate. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
            IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        AssertFailedReturn(VINF_SUCCESS); \
    } (void)0
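
/* The IEM_MC_NATIVE_IF path above lets the native recompiler emit the shift
   inline on the host architectures given in a_fRegNativeArchs; everything
   else (including the interpreter) takes the IEM_MC_CALL_VOID_AIMPL_2
   fallback into the C/assembly worker. */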


/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
}


/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * Group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
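
/* These group tables are indexed by ModR/M /r times four plus the
   mandatory-prefix index (pVCpu->iem.s.idxPrefix); going by the row layout
   above, the four columns per /r value correspond to no prefix, 0x66, 0xf3
   and 0xf2, in that order. */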


/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
}


/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * Group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);

/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
}


/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
}

/**
 * Group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
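
/* On real hardware emms just tags all eight x87 registers as empty again
   (FTW = 0xffff) so the FPU stack is usable after MMX code; the
   IEM_MC_FPU_FROM_MMX_MODE statement above models that transition. */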
6938
6939/* Opcode 0x66 0x0f 0x77 - invalid */
6940/* Opcode 0xf3 0x0f 0x77 - invalid */
6941/* Opcode 0xf2 0x0f 0x77 - invalid */
6942
6943/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6944#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6945FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6946{
6947 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6948 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6949 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6950 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6951
6952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6953 if (IEM_IS_MODRM_REG_MODE(bRm))
6954 {
6955 /*
6956 * Register, register.
6957 */
6958 if (enmEffOpSize == IEMMODE_64BIT)
6959 {
6960 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6961 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6962 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6963 IEM_MC_ARG(uint64_t, u64Enc, 1);
6964 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6965 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6966 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6967 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6968 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6969 IEM_MC_END();
6970 }
6971 else
6972 {
6973 IEM_MC_BEGIN(0, 0);
6974 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6975 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6976 IEM_MC_ARG(uint32_t, u32Enc, 1);
6977 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6978 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6979 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6980 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6981 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6982 IEM_MC_END();
6983 }
6984 }
6985 else
6986 {
6987 /*
6988 * Memory, register.
6989 */
6990 if (enmEffOpSize == IEMMODE_64BIT)
6991 {
6992 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6993 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6995 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6996 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6997 IEM_MC_ARG(uint64_t, u64Enc, 2);
6998 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6999 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7000 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7001 IEM_MC_END();
7002 }
7003 else
7004 {
7005 IEM_MC_BEGIN(0, 0);
7006 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7008 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7009 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7010 IEM_MC_ARG(uint32_t, u32Enc, 2);
7011 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7012 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7013 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7014 IEM_MC_END();
7015 }
7016 }
7017}
7018#else
7019FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7020#endif
7021
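/*
 * Note (editorial): as the enmEffOpSize selection above shows, the vmread
 * operand size is fixed by the CPU mode - always 64-bit in 64-bit code and
 * always 32-bit elsewhere; the 0x66 and REX.W prefixes play no part in the
 * choice.  vmwrite below makes the same selection.
 */
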
7022/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7023FNIEMOP_STUB(iemOp_AmdGrp17);
7024/* Opcode 0xf3 0x0f 0x78 - invalid */
7025/* Opcode 0xf2 0x0f 0x78 - invalid */
7026
7027/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7028#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7029FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7030{
7031 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7032 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7033 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7034 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7035
7036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7037 if (IEM_IS_MODRM_REG_MODE(bRm))
7038 {
7039 /*
7040 * Register, register.
7041 */
7042 if (enmEffOpSize == IEMMODE_64BIT)
7043 {
7044 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7045 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7046 IEM_MC_ARG(uint64_t, u64Val, 0);
7047 IEM_MC_ARG(uint64_t, u64Enc, 1);
7048 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7049 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7050 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7051 IEM_MC_END();
7052 }
7053 else
7054 {
7055 IEM_MC_BEGIN(0, 0);
7056 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7057 IEM_MC_ARG(uint32_t, u32Val, 0);
7058 IEM_MC_ARG(uint32_t, u32Enc, 1);
7059 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7060 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7061 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7062 IEM_MC_END();
7063 }
7064 }
7065 else
7066 {
7067 /*
7068 * Register, memory.
7069 */
7070 if (enmEffOpSize == IEMMODE_64BIT)
7071 {
7072 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7073 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7075 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7076 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7077 IEM_MC_ARG(uint64_t, u64Enc, 2);
7078 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7079 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7080 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7081 IEM_MC_END();
7082 }
7083 else
7084 {
7085 IEM_MC_BEGIN(0, 0);
7086 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7088 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
 7089 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
 7090 IEM_MC_ARG(uint32_t, u32Enc, 2);
7091 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7092 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7093 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7094 IEM_MC_END();
7095 }
7096 }
7097}
7098#else
7099FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7100#endif
7101/* Opcode 0x66 0x0f 0x79 - invalid */
7102/* Opcode 0xf3 0x0f 0x79 - invalid */
7103/* Opcode 0xf2 0x0f 0x79 - invalid */
7104
7105/* Opcode 0x0f 0x7a - invalid */
7106/* Opcode 0x66 0x0f 0x7a - invalid */
7107/* Opcode 0xf3 0x0f 0x7a - invalid */
7108/* Opcode 0xf2 0x0f 0x7a - invalid */
7109
7110/* Opcode 0x0f 0x7b - invalid */
7111/* Opcode 0x66 0x0f 0x7b - invalid */
7112/* Opcode 0xf3 0x0f 0x7b - invalid */
7113/* Opcode 0xf2 0x0f 0x7b - invalid */
7114
7115/* Opcode 0x0f 0x7c - invalid */
7116
7117
7118/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7119FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7120{
7121 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7122 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7123}
7124
7125
7126/* Opcode 0xf3 0x0f 0x7c - invalid */
7127
7128
7129/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7130FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7131{
7132 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7133 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7134}
7135
7136
7137/* Opcode 0x0f 0x7d - invalid */
7138
7139
7140/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7141FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7142{
7143 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7144 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7145}
7146
7147
7148/* Opcode 0xf3 0x0f 0x7d - invalid */
7149
7150
7151/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7152FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7153{
7154 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7155 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7156}
7157
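/*
 * Semantics sketch (editorial): the SSE3 horizontal ops pair up adjacent
 * elements, with the destination supplying the low half of the result and
 * the source the high half.  Reference for the packed-single forms, assuming
 * plain float arrays and ignoring MXCSR rounding/exception details (the
 * *Ref helper names are hypothetical, not IEM code):
 */
#if 0 /* illustrative only */
static void haddpsRef(float aDst[4], float const aSrc[4])
{
    float const aRes[4] = { aDst[0] + aDst[1], aDst[2] + aDst[3],   /* dst pairs */
                            aSrc[0] + aSrc[1], aSrc[2] + aSrc[3] }; /* src pairs */
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = aRes[i];
}

static void hsubpsRef(float aDst[4], float const aSrc[4])
{
    float const aRes[4] = { aDst[0] - aDst[1], aDst[2] - aDst[3],   /* left element minus right */
                            aSrc[0] - aSrc[1], aSrc[2] - aSrc[3] };
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = aRes[i];
}
#endif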
7158
7159/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7160FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7161{
7162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7164 {
7165 /**
7166 * @opcode 0x7e
7167 * @opcodesub rex.w=1
7168 * @oppfx none
7169 * @opcpuid mmx
7170 * @opgroup og_mmx_datamove
7171 * @opxcpttype 5
7172 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7173 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7174 */
7175 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7176 if (IEM_IS_MODRM_REG_MODE(bRm))
7177 {
7178 /* greg64, MMX */
7179 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7181 IEM_MC_LOCAL(uint64_t, u64Tmp);
7182
7183 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7184 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7185 IEM_MC_FPU_TO_MMX_MODE();
7186
7187 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7188 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7189
7190 IEM_MC_ADVANCE_RIP_AND_FINISH();
7191 IEM_MC_END();
7192 }
7193 else
7194 {
7195 /* [mem64], MMX */
7196 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7198 IEM_MC_LOCAL(uint64_t, u64Tmp);
7199
7200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7202 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7203 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7204
7205 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7206 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7207 IEM_MC_FPU_TO_MMX_MODE();
7208
7209 IEM_MC_ADVANCE_RIP_AND_FINISH();
7210 IEM_MC_END();
7211 }
7212 }
7213 else
7214 {
7215 /**
7216 * @opdone
7217 * @opcode 0x7e
7218 * @opcodesub rex.w=0
7219 * @oppfx none
7220 * @opcpuid mmx
7221 * @opgroup og_mmx_datamove
7222 * @opxcpttype 5
 7223 * @opfunction iemOp_movd_q_Ey_Pd
7224 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7225 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7226 */
7227 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7228 if (IEM_IS_MODRM_REG_MODE(bRm))
7229 {
7230 /* greg32, MMX */
7231 IEM_MC_BEGIN(0, 0);
7232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7233 IEM_MC_LOCAL(uint32_t, u32Tmp);
7234
7235 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7236 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7237 IEM_MC_FPU_TO_MMX_MODE();
7238
7239 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7240 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7241
7242 IEM_MC_ADVANCE_RIP_AND_FINISH();
7243 IEM_MC_END();
7244 }
7245 else
7246 {
7247 /* [mem32], MMX */
7248 IEM_MC_BEGIN(0, 0);
7249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7250 IEM_MC_LOCAL(uint32_t, u32Tmp);
7251
7252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7254 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7255 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7256
7257 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7258 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7259 IEM_MC_FPU_TO_MMX_MODE();
7260
7261 IEM_MC_ADVANCE_RIP_AND_FINISH();
7262 IEM_MC_END();
7263 }
7264 }
7265}
7266
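/*
 * Usage note (editorial): with REX.W set this is 'movq rax, mm0' and stores
 * the full 64-bit MMX register; without it it is 'movd eax, mm0' and stores
 * only the low doubleword.  Both variants also switch the FPU into MMX mode
 * (IEM_MC_FPU_TO_MMX_MODE above), which is why the tests expect ftw=0xff.
 */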
7267
7268FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7269{
7270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7271 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7272 {
7273 /**
7274 * @opcode 0x7e
7275 * @opcodesub rex.w=1
7276 * @oppfx 0x66
7277 * @opcpuid sse2
7278 * @opgroup og_sse2_simdint_datamove
7279 * @opxcpttype 5
7280 * @optest 64-bit / op1=1 op2=2 -> op1=2
7281 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7282 */
7283 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7284 if (IEM_IS_MODRM_REG_MODE(bRm))
7285 {
7286 /* greg64, XMM */
7287 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7289 IEM_MC_LOCAL(uint64_t, u64Tmp);
7290
7291 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7292 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7293
7294 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7295 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7296
7297 IEM_MC_ADVANCE_RIP_AND_FINISH();
7298 IEM_MC_END();
7299 }
7300 else
7301 {
7302 /* [mem64], XMM */
7303 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7305 IEM_MC_LOCAL(uint64_t, u64Tmp);
7306
7307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7310 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7311
7312 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7313 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7314
7315 IEM_MC_ADVANCE_RIP_AND_FINISH();
7316 IEM_MC_END();
7317 }
7318 }
7319 else
7320 {
7321 /**
7322 * @opdone
7323 * @opcode 0x7e
7324 * @opcodesub rex.w=0
7325 * @oppfx 0x66
7326 * @opcpuid sse2
7327 * @opgroup og_sse2_simdint_datamove
7328 * @opxcpttype 5
 7329 * @opfunction iemOp_movd_q_Ey_Vy
7330 * @optest op1=1 op2=2 -> op1=2
7331 * @optest op1=0 op2=-42 -> op1=-42
7332 */
7333 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7334 if (IEM_IS_MODRM_REG_MODE(bRm))
7335 {
7336 /* greg32, XMM */
7337 IEM_MC_BEGIN(0, 0);
7338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7339 IEM_MC_LOCAL(uint32_t, u32Tmp);
7340
7341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7342 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7343
7344 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7345 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7346
7347 IEM_MC_ADVANCE_RIP_AND_FINISH();
7348 IEM_MC_END();
7349 }
7350 else
7351 {
7352 /* [mem32], XMM */
7353 IEM_MC_BEGIN(0, 0);
7354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7355 IEM_MC_LOCAL(uint32_t, u32Tmp);
7356
7357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7361
7362 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7363 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7364
7365 IEM_MC_ADVANCE_RIP_AND_FINISH();
7366 IEM_MC_END();
7367 }
7368 }
7369}
7370
7371/**
7372 * @opcode 0x7e
7373 * @oppfx 0xf3
7374 * @opcpuid sse2
7375 * @opgroup og_sse2_pcksclr_datamove
7376 * @opxcpttype none
7377 * @optest op1=1 op2=2 -> op1=2
7378 * @optest op1=0 op2=-42 -> op1=-42
7379 */
7380FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7381{
7382 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7384 if (IEM_IS_MODRM_REG_MODE(bRm))
7385 {
7386 /*
7387 * XMM128, XMM64.
7388 */
7389 IEM_MC_BEGIN(0, 0);
7390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7391 IEM_MC_LOCAL(uint64_t, uSrc);
7392
7393 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7394 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7395
7396 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7397 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7398
7399 IEM_MC_ADVANCE_RIP_AND_FINISH();
7400 IEM_MC_END();
7401 }
7402 else
7403 {
7404 /*
7405 * XMM128, [mem64].
7406 */
7407 IEM_MC_BEGIN(0, 0);
7408 IEM_MC_LOCAL(uint64_t, uSrc);
7409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7410
7411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7415
7416 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7417 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7418
7419 IEM_MC_ADVANCE_RIP_AND_FINISH();
7420 IEM_MC_END();
7421 }
7422}
7423
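/*
 * Semantics sketch (editorial): the f3-prefixed movq load always writes the
 * full 128-bit destination, zeroing the high qword - that is the _ZX_U128
 * store above.  Assuming a two-qword array for the XMM register (the *Ref
 * helper name is hypothetical, not IEM code):
 */
#if 0 /* illustrative only */
static void movqVqWqRef(uint64_t aDst[2], uint64_t uSrc)
{
    aDst[0] = uSrc; /* low qword from register or memory */
    aDst[1] = 0;    /* high qword cleared */
}
#endif
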
7424/* Opcode 0xf2 0x0f 0x7e - invalid */
7425
7426
7427/** Opcode 0x0f 0x7f - movq Qq, Pq */
7428FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7429{
7430 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7432 if (IEM_IS_MODRM_REG_MODE(bRm))
7433 {
7434 /*
7435 * MMX, MMX.
7436 */
7437 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7438 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7439 IEM_MC_BEGIN(0, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7441 IEM_MC_LOCAL(uint64_t, u64Tmp);
7442 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7443 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7444 IEM_MC_FPU_TO_MMX_MODE();
7445
7446 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7447 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7448
7449 IEM_MC_ADVANCE_RIP_AND_FINISH();
7450 IEM_MC_END();
7451 }
7452 else
7453 {
7454 /*
7455 * [mem64], MMX.
7456 */
7457 IEM_MC_BEGIN(0, 0);
7458 IEM_MC_LOCAL(uint64_t, u64Tmp);
7459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7460
7461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7463 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7464 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7465
7466 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7467 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7468 IEM_MC_FPU_TO_MMX_MODE();
7469
7470 IEM_MC_ADVANCE_RIP_AND_FINISH();
7471 IEM_MC_END();
7472 }
7473}
7474
7475/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7476FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7477{
7478 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7480 if (IEM_IS_MODRM_REG_MODE(bRm))
7481 {
7482 /*
7483 * XMM, XMM.
7484 */
7485 IEM_MC_BEGIN(0, 0);
7486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7489 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7490 IEM_GET_MODRM_REG(pVCpu, bRm));
7491 IEM_MC_ADVANCE_RIP_AND_FINISH();
7492 IEM_MC_END();
7493 }
7494 else
7495 {
7496 /*
7497 * [mem128], XMM.
7498 */
7499 IEM_MC_BEGIN(0, 0);
7500 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7502
7503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7505 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7507
7508 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7509 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7510
7511 IEM_MC_ADVANCE_RIP_AND_FINISH();
7512 IEM_MC_END();
7513 }
7514}
7515
7516/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7517FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7518{
7519 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7521 if (IEM_IS_MODRM_REG_MODE(bRm))
7522 {
7523 /*
7524 * XMM, XMM.
7525 */
7526 IEM_MC_BEGIN(0, 0);
7527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7528 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7529 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7530 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7531 IEM_GET_MODRM_REG(pVCpu, bRm));
7532 IEM_MC_ADVANCE_RIP_AND_FINISH();
7533 IEM_MC_END();
7534 }
7535 else
7536 {
7537 /*
7538 * [mem128], XMM.
7539 */
7540 IEM_MC_BEGIN(0, 0);
7541 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7543
7544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7546 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7547 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7548
7549 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7550 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7551
7552 IEM_MC_ADVANCE_RIP_AND_FINISH();
7553 IEM_MC_END();
7554 }
7555}
7556
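/*
 * Note (editorial): the only difference between the movdqa and movdqu bodies
 * above is the store used for the memory form - the movdqa
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE is expected to fault (#GP(0)) on a
 * misaligned 16-byte operand, while the movdqu _NO_AC variant accepts any
 * alignment.
 */
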
7557/* Opcode 0xf2 0x0f 0x7f - invalid */
7558
7559
7560/**
7561 * @opcode 0x80
7562 * @opfltest of
7563 */
7564FNIEMOP_DEF(iemOp_jo_Jv)
7565{
7566 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7567 IEMOP_HLP_MIN_386();
7568 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7569 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7570 {
7571 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7572 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7575 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7576 } IEM_MC_ELSE() {
7577 IEM_MC_ADVANCE_RIP_AND_FINISH();
7578 } IEM_MC_ENDIF();
7579 IEM_MC_END();
7580 }
7581 else
7582 {
7583 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7584 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7587 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7588 } IEM_MC_ELSE() {
7589 IEM_MC_ADVANCE_RIP_AND_FINISH();
7590 } IEM_MC_ENDIF();
7591 IEM_MC_END();
7592 }
7593}
7594
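/*
 * Pattern note (editorial): opcodes 0x0f 0x80..0x8f all follow the shape
 * above - fetch a rel16 or rel32 displacement according to the effective
 * operand size, test EFLAGS, and either take the relative jump or just
 * advance RIP.  A sketch of how the taken target is formed, assuming the
 * displacement is relative to the end of the instruction (the *Ref helper
 * name is hypothetical, not IEM code):
 */
#if 0 /* illustrative only */
static uint64_t jccTakenTargetRef(uint64_t uRipNext, int32_t offDisp, int f16BitOpSize)
{
    uint64_t uNewRip = uRipNext + (int64_t)offDisp; /* sign-extended displacement */
    if (f16BitOpSize)
        uNewRip &= UINT16_MAX;                      /* rel16 truncates the result to IP */
    return uNewRip;
}
#endif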
7595
7596/**
7597 * @opcode 0x81
7598 * @opfltest of
7599 */
7600FNIEMOP_DEF(iemOp_jno_Jv)
7601{
7602 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7603 IEMOP_HLP_MIN_386();
7604 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7605 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7606 {
7607 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7608 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7610 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7611 IEM_MC_ADVANCE_RIP_AND_FINISH();
7612 } IEM_MC_ELSE() {
7613 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7614 } IEM_MC_ENDIF();
7615 IEM_MC_END();
7616 }
7617 else
7618 {
7619 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7620 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7622 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7623 IEM_MC_ADVANCE_RIP_AND_FINISH();
7624 } IEM_MC_ELSE() {
7625 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7626 } IEM_MC_ENDIF();
7627 IEM_MC_END();
7628 }
7629}
7630
7631
7632/**
7633 * @opcode 0x82
7634 * @opfltest cf
7635 */
7636FNIEMOP_DEF(iemOp_jc_Jv)
7637{
7638 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7639 IEMOP_HLP_MIN_386();
7640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7641 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7642 {
7643 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7644 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7646 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7647 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7648 } IEM_MC_ELSE() {
7649 IEM_MC_ADVANCE_RIP_AND_FINISH();
7650 } IEM_MC_ENDIF();
7651 IEM_MC_END();
7652 }
7653 else
7654 {
7655 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7656 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7659 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7660 } IEM_MC_ELSE() {
7661 IEM_MC_ADVANCE_RIP_AND_FINISH();
7662 } IEM_MC_ENDIF();
7663 IEM_MC_END();
7664 }
7665}
7666
7667
7668/**
7669 * @opcode 0x83
7670 * @opfltest cf
7671 */
7672FNIEMOP_DEF(iemOp_jnc_Jv)
7673{
7674 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7675 IEMOP_HLP_MIN_386();
7676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7677 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7678 {
7679 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7680 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7682 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7683 IEM_MC_ADVANCE_RIP_AND_FINISH();
7684 } IEM_MC_ELSE() {
7685 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7686 } IEM_MC_ENDIF();
7687 IEM_MC_END();
7688 }
7689 else
7690 {
7691 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7692 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7694 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7695 IEM_MC_ADVANCE_RIP_AND_FINISH();
7696 } IEM_MC_ELSE() {
7697 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7698 } IEM_MC_ENDIF();
7699 IEM_MC_END();
7700 }
7701}
7702
7703
7704/**
7705 * @opcode 0x84
7706 * @opfltest zf
7707 */
7708FNIEMOP_DEF(iemOp_je_Jv)
7709{
7710 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7711 IEMOP_HLP_MIN_386();
7712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7713 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7714 {
7715 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7716 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7719 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7720 } IEM_MC_ELSE() {
7721 IEM_MC_ADVANCE_RIP_AND_FINISH();
7722 } IEM_MC_ENDIF();
7723 IEM_MC_END();
7724 }
7725 else
7726 {
7727 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7728 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7731 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7732 } IEM_MC_ELSE() {
7733 IEM_MC_ADVANCE_RIP_AND_FINISH();
7734 } IEM_MC_ENDIF();
7735 IEM_MC_END();
7736 }
7737}
7738
7739
7740/**
7741 * @opcode 0x85
7742 * @opfltest zf
7743 */
7744FNIEMOP_DEF(iemOp_jne_Jv)
7745{
7746 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7747 IEMOP_HLP_MIN_386();
7748 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7749 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7750 {
7751 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7752 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7754 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7755 IEM_MC_ADVANCE_RIP_AND_FINISH();
7756 } IEM_MC_ELSE() {
7757 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7758 } IEM_MC_ENDIF();
7759 IEM_MC_END();
7760 }
7761 else
7762 {
7763 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7764 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7767 IEM_MC_ADVANCE_RIP_AND_FINISH();
7768 } IEM_MC_ELSE() {
7769 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7770 } IEM_MC_ENDIF();
7771 IEM_MC_END();
7772 }
7773}
7774
7775
7776/**
7777 * @opcode 0x86
7778 * @opfltest cf,zf
7779 */
7780FNIEMOP_DEF(iemOp_jbe_Jv)
7781{
7782 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7783 IEMOP_HLP_MIN_386();
7784 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7785 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7786 {
7787 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7788 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7790 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7791 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7792 } IEM_MC_ELSE() {
7793 IEM_MC_ADVANCE_RIP_AND_FINISH();
7794 } IEM_MC_ENDIF();
7795 IEM_MC_END();
7796 }
7797 else
7798 {
7799 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7800 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7802 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7803 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7804 } IEM_MC_ELSE() {
7805 IEM_MC_ADVANCE_RIP_AND_FINISH();
7806 } IEM_MC_ENDIF();
7807 IEM_MC_END();
7808 }
7809}
7810
7811
7812/**
7813 * @opcode 0x87
7814 * @opfltest cf,zf
7815 */
7816FNIEMOP_DEF(iemOp_jnbe_Jv)
7817{
7818 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7819 IEMOP_HLP_MIN_386();
7820 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7821 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7822 {
7823 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7824 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7826 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7827 IEM_MC_ADVANCE_RIP_AND_FINISH();
7828 } IEM_MC_ELSE() {
7829 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7830 } IEM_MC_ENDIF();
7831 IEM_MC_END();
7832 }
7833 else
7834 {
7835 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7836 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7838 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7839 IEM_MC_ADVANCE_RIP_AND_FINISH();
7840 } IEM_MC_ELSE() {
7841 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7842 } IEM_MC_ENDIF();
7843 IEM_MC_END();
7844 }
7845}
7846
7847
7848/**
7849 * @opcode 0x88
7850 * @opfltest sf
7851 */
7852FNIEMOP_DEF(iemOp_js_Jv)
7853{
7854 IEMOP_MNEMONIC(js_Jv, "js Jv");
7855 IEMOP_HLP_MIN_386();
7856 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7857 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7858 {
7859 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7860 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7862 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7863 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7864 } IEM_MC_ELSE() {
7865 IEM_MC_ADVANCE_RIP_AND_FINISH();
7866 } IEM_MC_ENDIF();
7867 IEM_MC_END();
7868 }
7869 else
7870 {
7871 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7872 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7874 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7875 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7876 } IEM_MC_ELSE() {
7877 IEM_MC_ADVANCE_RIP_AND_FINISH();
7878 } IEM_MC_ENDIF();
7879 IEM_MC_END();
7880 }
7881}
7882
7883
7884/**
7885 * @opcode 0x89
7886 * @opfltest sf
7887 */
7888FNIEMOP_DEF(iemOp_jns_Jv)
7889{
7890 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7891 IEMOP_HLP_MIN_386();
7892 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7893 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7894 {
7895 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7896 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7899 IEM_MC_ADVANCE_RIP_AND_FINISH();
7900 } IEM_MC_ELSE() {
7901 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7902 } IEM_MC_ENDIF();
7903 IEM_MC_END();
7904 }
7905 else
7906 {
7907 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7908 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7911 IEM_MC_ADVANCE_RIP_AND_FINISH();
7912 } IEM_MC_ELSE() {
7913 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7914 } IEM_MC_ENDIF();
7915 IEM_MC_END();
7916 }
7917}
7918
7919
7920/**
7921 * @opcode 0x8a
7922 * @opfltest pf
7923 */
7924FNIEMOP_DEF(iemOp_jp_Jv)
7925{
7926 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7927 IEMOP_HLP_MIN_386();
7928 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7929 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7930 {
7931 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7932 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7935 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7936 } IEM_MC_ELSE() {
7937 IEM_MC_ADVANCE_RIP_AND_FINISH();
7938 } IEM_MC_ENDIF();
7939 IEM_MC_END();
7940 }
7941 else
7942 {
7943 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7944 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7946 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7947 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7948 } IEM_MC_ELSE() {
7949 IEM_MC_ADVANCE_RIP_AND_FINISH();
7950 } IEM_MC_ENDIF();
7951 IEM_MC_END();
7952 }
7953}
7954
7955
7956/**
7957 * @opcode 0x8b
7958 * @opfltest pf
7959 */
7960FNIEMOP_DEF(iemOp_jnp_Jv)
7961{
7962 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7963 IEMOP_HLP_MIN_386();
7964 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7965 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7966 {
7967 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7968 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7970 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7971 IEM_MC_ADVANCE_RIP_AND_FINISH();
7972 } IEM_MC_ELSE() {
7973 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7974 } IEM_MC_ENDIF();
7975 IEM_MC_END();
7976 }
7977 else
7978 {
7979 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7980 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7982 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7983 IEM_MC_ADVANCE_RIP_AND_FINISH();
7984 } IEM_MC_ELSE() {
7985 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7986 } IEM_MC_ENDIF();
7987 IEM_MC_END();
7988 }
7989}
7990
7991
7992/**
7993 * @opcode 0x8c
7994 * @opfltest sf,of
7995 */
7996FNIEMOP_DEF(iemOp_jl_Jv)
7997{
7998 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7999 IEMOP_HLP_MIN_386();
8000 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8001 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8002 {
8003 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8004 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8006 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8007 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8008 } IEM_MC_ELSE() {
8009 IEM_MC_ADVANCE_RIP_AND_FINISH();
8010 } IEM_MC_ENDIF();
8011 IEM_MC_END();
8012 }
8013 else
8014 {
8015 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8016 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8018 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8019 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8020 } IEM_MC_ELSE() {
8021 IEM_MC_ADVANCE_RIP_AND_FINISH();
8022 } IEM_MC_ENDIF();
8023 IEM_MC_END();
8024 }
8025}
8026
8027
8028/**
8029 * @opcode 0x8d
8030 * @opfltest sf,of
8031 */
8032FNIEMOP_DEF(iemOp_jnl_Jv)
8033{
8034 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8035 IEMOP_HLP_MIN_386();
8036 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8037 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8038 {
8039 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8040 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8042 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8043 IEM_MC_ADVANCE_RIP_AND_FINISH();
8044 } IEM_MC_ELSE() {
8045 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8046 } IEM_MC_ENDIF();
8047 IEM_MC_END();
8048 }
8049 else
8050 {
8051 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8052 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8054 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8055 IEM_MC_ADVANCE_RIP_AND_FINISH();
8056 } IEM_MC_ELSE() {
8057 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8058 } IEM_MC_ENDIF();
8059 IEM_MC_END();
8060 }
8061}
8062
8063
8064/**
8065 * @opcode 0x8e
8066 * @opfltest zf,sf,of
8067 */
8068FNIEMOP_DEF(iemOp_jle_Jv)
8069{
8070 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8071 IEMOP_HLP_MIN_386();
8072 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8073 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8074 {
8075 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8076 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8078 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8079 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8080 } IEM_MC_ELSE() {
8081 IEM_MC_ADVANCE_RIP_AND_FINISH();
8082 } IEM_MC_ENDIF();
8083 IEM_MC_END();
8084 }
8085 else
8086 {
8087 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8088 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8090 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8091 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8092 } IEM_MC_ELSE() {
8093 IEM_MC_ADVANCE_RIP_AND_FINISH();
8094 } IEM_MC_ENDIF();
8095 IEM_MC_END();
8096 }
8097}
8098
8099
8100/**
8101 * @opcode 0x8f
8102 * @opfltest zf,sf,of
8103 */
8104FNIEMOP_DEF(iemOp_jnle_Jv)
8105{
8106 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8107 IEMOP_HLP_MIN_386();
8108 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8109 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8110 {
8111 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8112 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8114 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8115 IEM_MC_ADVANCE_RIP_AND_FINISH();
8116 } IEM_MC_ELSE() {
8117 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8118 } IEM_MC_ENDIF();
8119 IEM_MC_END();
8120 }
8121 else
8122 {
8123 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8124 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8126 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8127 IEM_MC_ADVANCE_RIP_AND_FINISH();
8128 } IEM_MC_ELSE() {
8129 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8130 } IEM_MC_ENDIF();
8131 IEM_MC_END();
8132 }
8133}
8134
8135
8136/**
8137 * @opcode 0x90
8138 * @opfltest of
8139 */
8140FNIEMOP_DEF(iemOp_seto_Eb)
8141{
8142 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8143 IEMOP_HLP_MIN_386();
8144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8145
8146 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8147 * any way. AMD says it's "unused", whatever that means. We're
8148 * ignoring for now. */
8149 if (IEM_IS_MODRM_REG_MODE(bRm))
8150 {
8151 /* register target */
8152 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8154 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8155 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8156 } IEM_MC_ELSE() {
8157 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8158 } IEM_MC_ENDIF();
8159 IEM_MC_ADVANCE_RIP_AND_FINISH();
8160 IEM_MC_END();
8161 }
8162 else
8163 {
8164 /* memory target */
8165 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8170 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8171 } IEM_MC_ELSE() {
8172 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8173 } IEM_MC_ENDIF();
8174 IEM_MC_ADVANCE_RIP_AND_FINISH();
8175 IEM_MC_END();
8176 }
8177}
8178
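/*
 * Pattern note (editorial): opcodes 0x0f 0x90..0x9f all write a single byte,
 * 1 when the condition holds and 0 otherwise, to the r/m operand; the modrm
 * reg field is not used to select an operation (see the @todo above).  For
 * the single-flag forms the condition reduces to a mask test (hypothetical
 * helper, not IEM code):
 */
#if 0 /* illustrative only */
static uint8_t setccSingleFlagRef(uint32_t fEFlags, uint32_t fFlagMask)
{
    /* E.g. seto tests X86_EFL_OF and setc tests X86_EFL_CF; compound
       conditions like setle combine several flags instead. */
    return (fEFlags & fFlagMask) ? 1 : 0;
}
#endif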
8179
8180/**
8181 * @opcode 0x91
8182 * @opfltest of
8183 */
8184FNIEMOP_DEF(iemOp_setno_Eb)
8185{
8186 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8187 IEMOP_HLP_MIN_386();
8188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8189
8190 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8191 * any way. AMD says it's "unused", whatever that means. We're
8192 * ignoring for now. */
8193 if (IEM_IS_MODRM_REG_MODE(bRm))
8194 {
8195 /* register target */
8196 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8198 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8199 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8200 } IEM_MC_ELSE() {
8201 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8202 } IEM_MC_ENDIF();
8203 IEM_MC_ADVANCE_RIP_AND_FINISH();
8204 IEM_MC_END();
8205 }
8206 else
8207 {
8208 /* memory target */
8209 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8214 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8215 } IEM_MC_ELSE() {
8216 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8217 } IEM_MC_ENDIF();
8218 IEM_MC_ADVANCE_RIP_AND_FINISH();
8219 IEM_MC_END();
8220 }
8221}
8222
8223
8224/**
8225 * @opcode 0x92
8226 * @opfltest cf
8227 */
8228FNIEMOP_DEF(iemOp_setc_Eb)
8229{
8230 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8231 IEMOP_HLP_MIN_386();
8232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8233
8234 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8235 * any way. AMD says it's "unused", whatever that means. We're
8236 * ignoring for now. */
8237 if (IEM_IS_MODRM_REG_MODE(bRm))
8238 {
8239 /* register target */
8240 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8242 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8243 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8244 } IEM_MC_ELSE() {
8245 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8246 } IEM_MC_ENDIF();
8247 IEM_MC_ADVANCE_RIP_AND_FINISH();
8248 IEM_MC_END();
8249 }
8250 else
8251 {
8252 /* memory target */
8253 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8257 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8258 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8259 } IEM_MC_ELSE() {
8260 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8261 } IEM_MC_ENDIF();
8262 IEM_MC_ADVANCE_RIP_AND_FINISH();
8263 IEM_MC_END();
8264 }
8265}
8266
8267
8268/**
8269 * @opcode 0x93
8270 * @opfltest cf
8271 */
8272FNIEMOP_DEF(iemOp_setnc_Eb)
8273{
8274 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8275 IEMOP_HLP_MIN_386();
8276 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8277
8278 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8279 * any way. AMD says it's "unused", whatever that means. We're
8280 * ignoring for now. */
8281 if (IEM_IS_MODRM_REG_MODE(bRm))
8282 {
8283 /* register target */
8284 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8287 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8288 } IEM_MC_ELSE() {
8289 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8290 } IEM_MC_ENDIF();
8291 IEM_MC_ADVANCE_RIP_AND_FINISH();
8292 IEM_MC_END();
8293 }
8294 else
8295 {
8296 /* memory target */
8297 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8301 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8302 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8303 } IEM_MC_ELSE() {
8304 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8305 } IEM_MC_ENDIF();
8306 IEM_MC_ADVANCE_RIP_AND_FINISH();
8307 IEM_MC_END();
8308 }
8309}
8310
8311
8312/**
8313 * @opcode 0x94
8314 * @opfltest zf
8315 */
8316FNIEMOP_DEF(iemOp_sete_Eb)
8317{
8318 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8319 IEMOP_HLP_MIN_386();
8320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8321
8322 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8323 * any way. AMD says it's "unused", whatever that means. We're
8324 * ignoring for now. */
8325 if (IEM_IS_MODRM_REG_MODE(bRm))
8326 {
8327 /* register target */
8328 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8331 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8332 } IEM_MC_ELSE() {
8333 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8334 } IEM_MC_ENDIF();
8335 IEM_MC_ADVANCE_RIP_AND_FINISH();
8336 IEM_MC_END();
8337 }
8338 else
8339 {
8340 /* memory target */
8341 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8345 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8347 } IEM_MC_ELSE() {
8348 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8349 } IEM_MC_ENDIF();
8350 IEM_MC_ADVANCE_RIP_AND_FINISH();
8351 IEM_MC_END();
8352 }
8353}
8354
8355
8356/**
8357 * @opcode 0x95
8358 * @opfltest zf
8359 */
8360FNIEMOP_DEF(iemOp_setne_Eb)
8361{
8362 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8363 IEMOP_HLP_MIN_386();
8364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8365
8366 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8367 * any way. AMD says it's "unused", whatever that means. We're
8368 * ignoring for now. */
8369 if (IEM_IS_MODRM_REG_MODE(bRm))
8370 {
8371 /* register target */
8372 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8375 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8376 } IEM_MC_ELSE() {
8377 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8378 } IEM_MC_ENDIF();
8379 IEM_MC_ADVANCE_RIP_AND_FINISH();
8380 IEM_MC_END();
8381 }
8382 else
8383 {
8384 /* memory target */
8385 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8389 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8390 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8391 } IEM_MC_ELSE() {
8392 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8393 } IEM_MC_ENDIF();
8394 IEM_MC_ADVANCE_RIP_AND_FINISH();
8395 IEM_MC_END();
8396 }
8397}
8398
8399
8400/**
8401 * @opcode 0x96
8402 * @opfltest cf,zf
8403 */
8404FNIEMOP_DEF(iemOp_setbe_Eb)
8405{
8406 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8407 IEMOP_HLP_MIN_386();
8408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8409
8410 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8411 * any way. AMD says it's "unused", whatever that means. We're
8412 * ignoring for now. */
8413 if (IEM_IS_MODRM_REG_MODE(bRm))
8414 {
8415 /* register target */
8416 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8418 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8419 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8420 } IEM_MC_ELSE() {
8421 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8422 } IEM_MC_ENDIF();
8423 IEM_MC_ADVANCE_RIP_AND_FINISH();
8424 IEM_MC_END();
8425 }
8426 else
8427 {
8428 /* memory target */
8429 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8433 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8435 } IEM_MC_ELSE() {
8436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8437 } IEM_MC_ENDIF();
8438 IEM_MC_ADVANCE_RIP_AND_FINISH();
8439 IEM_MC_END();
8440 }
8441}
8442
8443
8444/**
8445 * @opcode 0x97
8446 * @opfltest cf,zf
8447 */
8448FNIEMOP_DEF(iemOp_setnbe_Eb)
8449{
8450 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8451 IEMOP_HLP_MIN_386();
8452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8453
8454 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8455 * any way. AMD says it's "unused", whatever that means. We're
8456 * ignoring for now. */
8457 if (IEM_IS_MODRM_REG_MODE(bRm))
8458 {
8459 /* register target */
8460 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8462 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8463 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8464 } IEM_MC_ELSE() {
8465 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8466 } IEM_MC_ENDIF();
8467 IEM_MC_ADVANCE_RIP_AND_FINISH();
8468 IEM_MC_END();
8469 }
8470 else
8471 {
8472 /* memory target */
8473 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8477 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8479 } IEM_MC_ELSE() {
8480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8481 } IEM_MC_ENDIF();
8482 IEM_MC_ADVANCE_RIP_AND_FINISH();
8483 IEM_MC_END();
8484 }
8485}
8486
8487
8488/**
8489 * @opcode 0x98
8490 * @opfltest sf
8491 */
8492FNIEMOP_DEF(iemOp_sets_Eb)
8493{
8494 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8495 IEMOP_HLP_MIN_386();
8496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8497
8498 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8499 * any way. AMD says it's "unused", whatever that means. We're
8500 * ignoring for now. */
8501 if (IEM_IS_MODRM_REG_MODE(bRm))
8502 {
8503 /* register target */
8504 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8507 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8508 } IEM_MC_ELSE() {
8509 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8510 } IEM_MC_ENDIF();
8511 IEM_MC_ADVANCE_RIP_AND_FINISH();
8512 IEM_MC_END();
8513 }
8514 else
8515 {
8516 /* memory target */
8517 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8523 } IEM_MC_ELSE() {
8524 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8525 } IEM_MC_ENDIF();
8526 IEM_MC_ADVANCE_RIP_AND_FINISH();
8527 IEM_MC_END();
8528 }
8529}
8530
8531
8532/**
8533 * @opcode 0x99
8534 * @opfltest sf
8535 */
8536FNIEMOP_DEF(iemOp_setns_Eb)
8537{
8538 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8539 IEMOP_HLP_MIN_386();
8540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8541
8542 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8543 * any way. AMD says it's "unused", whatever that means. We're
8544 * ignoring for now. */
8545 if (IEM_IS_MODRM_REG_MODE(bRm))
8546 {
8547 /* register target */
8548 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8551 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8552 } IEM_MC_ELSE() {
8553 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8554 } IEM_MC_ENDIF();
8555 IEM_MC_ADVANCE_RIP_AND_FINISH();
8556 IEM_MC_END();
8557 }
8558 else
8559 {
8560 /* memory target */
8561 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8565 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8567 } IEM_MC_ELSE() {
8568 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8569 } IEM_MC_ENDIF();
8570 IEM_MC_ADVANCE_RIP_AND_FINISH();
8571 IEM_MC_END();
8572 }
8573}
8574
8575
8576/**
8577 * @opcode 0x9a
8578 * @opfltest pf
8579 */
8580FNIEMOP_DEF(iemOp_setp_Eb)
8581{
8582 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8583 IEMOP_HLP_MIN_386();
8584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8585
8586 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8587 * any way. AMD says it's "unused", whatever that means. We're
8588 * ignoring for now. */
8589 if (IEM_IS_MODRM_REG_MODE(bRm))
8590 {
8591 /* register target */
8592 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8595 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8596 } IEM_MC_ELSE() {
8597 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8598 } IEM_MC_ENDIF();
8599 IEM_MC_ADVANCE_RIP_AND_FINISH();
8600 IEM_MC_END();
8601 }
8602 else
8603 {
8604 /* memory target */
8605 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8611 } IEM_MC_ELSE() {
8612 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8613 } IEM_MC_ENDIF();
8614 IEM_MC_ADVANCE_RIP_AND_FINISH();
8615 IEM_MC_END();
8616 }
8617}
8618
8619
8620/**
8621 * @opcode 0x9b
8622 * @opfltest pf
8623 */
8624FNIEMOP_DEF(iemOp_setnp_Eb)
8625{
8626 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8627 IEMOP_HLP_MIN_386();
8628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8629
8630 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8631 * any way. AMD says it's "unused", whatever that means. We're
8632 * ignoring for now. */
8633 if (IEM_IS_MODRM_REG_MODE(bRm))
8634 {
8635 /* register target */
8636 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8638 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8639 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8640 } IEM_MC_ELSE() {
8641 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8642 } IEM_MC_ENDIF();
8643 IEM_MC_ADVANCE_RIP_AND_FINISH();
8644 IEM_MC_END();
8645 }
8646 else
8647 {
8648 /* memory target */
8649 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8654 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8655 } IEM_MC_ELSE() {
8656 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8657 } IEM_MC_ENDIF();
8658 IEM_MC_ADVANCE_RIP_AND_FINISH();
8659 IEM_MC_END();
8660 }
8661}
8662
8663
8664/**
8665 * @opcode 0x9c
8666 * @opfltest sf,of
8667 */
8668FNIEMOP_DEF(iemOp_setl_Eb)
8669{
8670 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8671 IEMOP_HLP_MIN_386();
8672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8673
8674 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8675 * any way. AMD says it's "unused", whatever that means. We're
8676 * ignoring for now. */
8677 if (IEM_IS_MODRM_REG_MODE(bRm))
8678 {
8679 /* register target */
8680 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8682 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8683 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8684 } IEM_MC_ELSE() {
8685 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8686 } IEM_MC_ENDIF();
8687 IEM_MC_ADVANCE_RIP_AND_FINISH();
8688 IEM_MC_END();
8689 }
8690 else
8691 {
8692 /* memory target */
8693 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8697 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8699 } IEM_MC_ELSE() {
8700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8701 } IEM_MC_ENDIF();
8702 IEM_MC_ADVANCE_RIP_AND_FINISH();
8703 IEM_MC_END();
8704 }
8705}
8706
8707
8708/**
8709 * @opcode 0x9d
8710 * @opfltest sf,of
8711 */
8712FNIEMOP_DEF(iemOp_setnl_Eb)
8713{
8714 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8715 IEMOP_HLP_MIN_386();
8716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8717
8718 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8719 * any way. AMD says it's "unused", whatever that means. We're
8720 * ignoring for now. */
8721 if (IEM_IS_MODRM_REG_MODE(bRm))
8722 {
8723 /* register target */
8724 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8726 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8727 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8728 } IEM_MC_ELSE() {
8729 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8730 } IEM_MC_ENDIF();
8731 IEM_MC_ADVANCE_RIP_AND_FINISH();
8732 IEM_MC_END();
8733 }
8734 else
8735 {
8736 /* memory target */
8737 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8741 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8743 } IEM_MC_ELSE() {
8744 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8745 } IEM_MC_ENDIF();
8746 IEM_MC_ADVANCE_RIP_AND_FINISH();
8747 IEM_MC_END();
8748 }
8749}
8750
8751
8752/**
8753 * @opcode 0x9e
8754 * @opfltest zf,sf,of
8755 */
8756FNIEMOP_DEF(iemOp_setle_Eb)
8757{
8758 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8759 IEMOP_HLP_MIN_386();
8760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8761
8762 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8763 * any way. AMD says it's "unused", whatever that means. We're
8764 * ignoring for now. */
8765 if (IEM_IS_MODRM_REG_MODE(bRm))
8766 {
8767 /* register target */
8768 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8770 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8771 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8772 } IEM_MC_ELSE() {
8773 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8774 } IEM_MC_ENDIF();
8775 IEM_MC_ADVANCE_RIP_AND_FINISH();
8776 IEM_MC_END();
8777 }
8778 else
8779 {
8780 /* memory target */
8781 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8785 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8786 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8787 } IEM_MC_ELSE() {
8788 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8789 } IEM_MC_ENDIF();
8790 IEM_MC_ADVANCE_RIP_AND_FINISH();
8791 IEM_MC_END();
8792 }
8793}
8794
8795
8796/**
8797 * @opcode 0x9f
8798 * @opfltest zf,sf,of
8799 */
8800FNIEMOP_DEF(iemOp_setnle_Eb)
8801{
8802 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8803 IEMOP_HLP_MIN_386();
8804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8805
8806 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8807 * any way. AMD says it's "unused", whatever that means. We're
8808 * ignoring for now. */
8809 if (IEM_IS_MODRM_REG_MODE(bRm))
8810 {
8811 /* register target */
8812 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8814 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8815 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8816 } IEM_MC_ELSE() {
8817 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8818 } IEM_MC_ENDIF();
8819 IEM_MC_ADVANCE_RIP_AND_FINISH();
8820 IEM_MC_END();
8821 }
8822 else
8823 {
8824 /* memory target */
8825 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8829 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8831 } IEM_MC_ELSE() {
8832 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8833 } IEM_MC_ENDIF();
8834 IEM_MC_ADVANCE_RIP_AND_FINISH();
8835 IEM_MC_END();
8836 }
8837}
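

/*
 * Informational summary of the signed SETcc conditions implemented above:
 *
 *      setl   / setnge : SF != OF
 *      setnl  / setge  : SF == OF
 *      setle  / setng  : ZF || SF != OF
 *      setnle / setg   : !ZF && SF == OF
 *
 * E.g. after 'cmp eax, ebx', 'setnle cl' yields cl=1 iff eax > ebx (signed).
 */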
8838
8839
8840/** Opcode 0x0f 0xa0. */
8841FNIEMOP_DEF(iemOp_push_fs)
8842{
8843 IEMOP_MNEMONIC(push_fs, "push fs");
8844 IEMOP_HLP_MIN_386();
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8847}
8848
8849
8850/** Opcode 0x0f 0xa1. */
8851FNIEMOP_DEF(iemOp_pop_fs)
8852{
8853 IEMOP_MNEMONIC(pop_fs, "pop fs");
8854 IEMOP_HLP_MIN_386();
8855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8856 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8857 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8858 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8859 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8860 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8861 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8862 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8863 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8864}
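

/*
 * Note: the second argument to IEM_MC_DEFER_TO_CIMPL_2_RET above is the
 * guest shadow register flush mask, i.e. the state the C implementation
 * may dirty and the native recompiler therefore has to invalidate.  For
 * 'pop fs' (and 'pop gs' further down) that is RSP plus the selector and
 * all hidden parts (base, limit, attributes) of the popped segment register.
 */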
8865
8866
8867/** Opcode 0x0f 0xa2. */
8868FNIEMOP_DEF(iemOp_cpuid)
8869{
8870 IEMOP_MNEMONIC(cpuid, "cpuid");
8871    IEMOP_HLP_MIN_486(); /* not all 486 models support CPUID. */
8872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8873 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8874 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8875 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8877 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8878 iemCImpl_cpuid);
8879}
8880
8881
8882/**
8883 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8884 * iemOp_bts_Ev_Gv.
8885 */
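
/*
 * For the memory forms the bit offset in Gv is signed and may select a
 * byte well outside the initially decoded operand, so the macros below
 * split it into an address adjustment and an in-operand bit number.
 * Sketch of the 16-bit math:
 *
 *      int16_t  iBitNo   = (int16_t)u16Src;
 *      GCPtrEffDst      += (iBitNo >> 4) * 2;      // word granularity
 *      uint16_t iBitInOp = iBitNo & 0x0f;          // bit within that word
 *
 * E.g. 'bt word [base], ax' with ax=35 tests bit 3 of the word at base+4,
 * while ax=-1 (0xffff) tests bit 15 of the word at base-2.
 */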
8886
8887#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8889 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8890 \
8891 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8892 { \
8893 /* register destination. */ \
8894 switch (pVCpu->iem.s.enmEffOpSize) \
8895 { \
8896 case IEMMODE_16BIT: \
8897 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8899 \
8900 IEM_MC_ARG(uint16_t, u16Src, 2); \
8901 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8902 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8903 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8904 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8905 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8906 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8907 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8908 \
8909 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8910 IEM_MC_END(); \
8911 break; \
8912 \
8913 case IEMMODE_32BIT: \
8914 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8916 \
8917 IEM_MC_ARG(uint32_t, u32Src, 2); \
8918 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8919 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8920 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8921 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8922 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8923 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8924 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8925 \
8926 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8927 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8928 IEM_MC_END(); \
8929 break; \
8930 \
8931 case IEMMODE_64BIT: \
8932 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8934 \
8935 IEM_MC_ARG(uint64_t, u64Src, 2); \
8936 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8937 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8938 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8939 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8940 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8941 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8942 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8943 \
8944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8945 IEM_MC_END(); \
8946 break; \
8947 \
8948 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8949 } \
8950 } \
8951 else \
8952 { \
8953 /* memory destination. */ \
8954 /** @todo test negative bit offsets! */ \
8955 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8956 { \
8957 switch (pVCpu->iem.s.enmEffOpSize) \
8958 { \
8959 case IEMMODE_16BIT: \
8960 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8963 IEMOP_HLP_DONE_DECODING(); \
8964 \
8965 IEM_MC_ARG(uint16_t, u16Src, 2); \
8966 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8967 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8968 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8969 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8970 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8971 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8972 \
8973 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8974 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8975 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8976 \
8977 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8978 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8979 \
8980 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8981 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8982 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8983 IEM_MC_END(); \
8984 break; \
8985 \
8986 case IEMMODE_32BIT: \
8987 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8990 IEMOP_HLP_DONE_DECODING(); \
8991 \
8992 IEM_MC_ARG(uint32_t, u32Src, 2); \
8993 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8994 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8995 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8996 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8997 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8998 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8999 \
9000 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9001 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9002 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9003 \
9004 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9005 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9006 \
9007 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9008 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9009 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9010 IEM_MC_END(); \
9011 break; \
9012 \
9013 case IEMMODE_64BIT: \
9014 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9017 IEMOP_HLP_DONE_DECODING(); \
9018 \
9019 IEM_MC_ARG(uint64_t, u64Src, 2); \
9020 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9021 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9022 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9023 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9024 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9025 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9026 \
9027 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9028 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9029 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9030 \
9031 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9032 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9033 \
9034 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9035 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9036 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9037 IEM_MC_END(); \
9038 break; \
9039 \
9040 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9041 } \
9042 } \
9043 else \
9044 { \
9045 (void)0
9046/* Separate macro to work around a parsing issue in IEMAllInstPython.py */
9047#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9048 switch (pVCpu->iem.s.enmEffOpSize) \
9049 { \
9050 case IEMMODE_16BIT: \
9051 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9054 IEMOP_HLP_DONE_DECODING(); \
9055 \
9056 IEM_MC_ARG(uint16_t, u16Src, 2); \
9057 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9058 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9059 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9060 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9061 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9062 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9063 \
9064 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9065 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9066 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9067 \
9068 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9069 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9070 \
9071 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9072 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9073 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9074 IEM_MC_END(); \
9075 break; \
9076 \
9077 case IEMMODE_32BIT: \
9078 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9081 IEMOP_HLP_DONE_DECODING(); \
9082 \
9083 IEM_MC_ARG(uint32_t, u32Src, 2); \
9084 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9085 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9086 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9087 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9088 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9089 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9090 \
9091 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9092 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9093 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9094 \
9095 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9096 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9097 \
9098 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9099 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9100 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9101 IEM_MC_END(); \
9102 break; \
9103 \
9104 case IEMMODE_64BIT: \
9105 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9108 IEMOP_HLP_DONE_DECODING(); \
9109 \
9110 IEM_MC_ARG(uint64_t, u64Src, 2); \
9111 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9112 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9113 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9114 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9115 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9116 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9117 \
9118 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9119 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9120 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9121 \
9122 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9123 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9124 \
9125 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9126 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9127 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9128 IEM_MC_END(); \
9129 break; \
9130 \
9131 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9132 } \
9133 } \
9134 } \
9135 (void)0
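
/*
 * Note: IEMOP_BODY_BIT_Ev_Gv_RW ends inside an open else branch which
 * IEMOP_BODY_BIT_Ev_Gv_LOCKED closes again, so the two must always be
 * instantiated back to back, as in iemOp_bts_Ev_Gv:
 *
 *      IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
 *      IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
 */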
9136
9137/* Read-only version (bt). */
9138#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9140 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9141 \
9142 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9143 { \
9144 /* register destination. */ \
9145 switch (pVCpu->iem.s.enmEffOpSize) \
9146 { \
9147 case IEMMODE_16BIT: \
9148 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9150 \
9151 IEM_MC_ARG(uint16_t, u16Src, 2); \
9152 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9153 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9154 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9155 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9156 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9157 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9158 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9159 \
9160 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9161 IEM_MC_END(); \
9162 break; \
9163 \
9164 case IEMMODE_32BIT: \
9165 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9167 \
9168 IEM_MC_ARG(uint32_t, u32Src, 2); \
9169 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9170 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9171 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9172 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9173 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9174 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9175 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9176 \
9177 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9178 IEM_MC_END(); \
9179 break; \
9180 \
9181 case IEMMODE_64BIT: \
9182 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9184 \
9185 IEM_MC_ARG(uint64_t, u64Src, 2); \
9186 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9187 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9188 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9189 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9190 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9191 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9192 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9193 \
9194 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9195 IEM_MC_END(); \
9196 break; \
9197 \
9198 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9199 } \
9200 } \
9201 else \
9202 { \
9203 /* memory destination. */ \
9204 /** @todo test negative bit offsets! */ \
9205 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9206 { \
9207 switch (pVCpu->iem.s.enmEffOpSize) \
9208 { \
9209 case IEMMODE_16BIT: \
9210 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9213 IEMOP_HLP_DONE_DECODING(); \
9214 \
9215 IEM_MC_ARG(uint16_t, u16Src, 2); \
9216 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9217 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9218 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9219 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9220 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9221 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9222 \
9223 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9224 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9225 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9226 \
9227 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9228 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9229 \
9230 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9231 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9233 IEM_MC_END(); \
9234 break; \
9235 \
9236 case IEMMODE_32BIT: \
9237 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9240 IEMOP_HLP_DONE_DECODING(); \
9241 \
9242 IEM_MC_ARG(uint32_t, u32Src, 2); \
9243 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9244 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9245 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9246 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9247 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9248 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9249 \
9250 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9251 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9252 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9253 \
9254 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9255 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9256 \
9257 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9258 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9259 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9260 IEM_MC_END(); \
9261 break; \
9262 \
9263 case IEMMODE_64BIT: \
9264 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9267 IEMOP_HLP_DONE_DECODING(); \
9268 \
9269 IEM_MC_ARG(uint64_t, u64Src, 2); \
9270 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9271 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9272 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9273 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9274 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9275 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9276 \
9277 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9278 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9279 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9280 \
9281 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9282 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9283 \
9284 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9285 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9286 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9287 IEM_MC_END(); \
9288 break; \
9289 \
9290 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9291 } \
9292 } \
9293 else \
9294 { \
9295 IEMOP_HLP_DONE_DECODING(); \
9296 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9297 } \
9298 } \
9299 (void)0
9300
9301
9302/**
9303 * @opcode 0xa3
9304 * @oppfx n/a
9305 * @opflclass bitmap
9306 */
9307FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9308{
9309 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9310 IEMOP_HLP_MIN_386();
9311 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9312}
9313
9314
9315/**
9316 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9317 */
9318#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(a_pImplExpr) \
9319 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9320 \
9321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9322 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9323 \
9324 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9325 { \
9326 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9327 \
9328 switch (pVCpu->iem.s.enmEffOpSize) \
9329 { \
9330 case IEMMODE_16BIT: \
9331 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9333 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9334 IEM_MC_ARG(uint16_t, u16Src, 1); \
9335 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9336 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9337 \
9338 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9339 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9340 IEM_MC_REF_EFLAGS(pEFlags); \
9341 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9342 \
9343 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9344 IEM_MC_END(); \
9345 break; \
9346 \
9347 case IEMMODE_32BIT: \
9348 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9350 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9351 IEM_MC_ARG(uint32_t, u32Src, 1); \
9352 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9353 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9354 \
9355 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9356 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9357 IEM_MC_REF_EFLAGS(pEFlags); \
9358 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9359 \
9360 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9361 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9362 IEM_MC_END(); \
9363 break; \
9364 \
9365 case IEMMODE_64BIT: \
9366 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9368 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9369 IEM_MC_ARG(uint64_t, u64Src, 1); \
9370 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9371 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9372 \
9373 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9374 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9375 IEM_MC_REF_EFLAGS(pEFlags); \
9376 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9377 \
9378 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9379 IEM_MC_END(); \
9380 break; \
9381 \
9382 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9383 } \
9384 } \
9385 else \
9386 { \
9387 switch (pVCpu->iem.s.enmEffOpSize) \
9388 { \
9389 case IEMMODE_16BIT: \
9390 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9393 \
9394 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9396 \
9397 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9398 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9399 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9400 \
9401 IEM_MC_ARG(uint16_t, u16Src, 1); \
9402 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9403 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9404 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9405 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9406 \
9407 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9408 IEM_MC_COMMIT_EFLAGS(EFlags); \
9409 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9410 IEM_MC_END(); \
9411 break; \
9412 \
9413 case IEMMODE_32BIT: \
9414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9417 \
9418 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9420 \
9421 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9422 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9423 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9424 \
9425 IEM_MC_ARG(uint32_t, u32Src, 1); \
9426 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9427 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9428 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9429 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9430 \
9431 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9432 IEM_MC_COMMIT_EFLAGS(EFlags); \
9433 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9434 IEM_MC_END(); \
9435 break; \
9436 \
9437 case IEMMODE_64BIT: \
9438 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9441 \
9442 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9444 \
9445 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9446 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9447 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9448 \
9449 IEM_MC_ARG(uint64_t, u64Src, 1); \
9450 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9451 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9452 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9453 \
9454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9455 \
9456 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9457 IEM_MC_COMMIT_EFLAGS(EFlags); \
9458 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9459 IEM_MC_END(); \
9460 break; \
9461 \
9462 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9463 } \
9464 } (void)0
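
/*
 * Note the trailing '1' passed to IEM_MC_CALC_RM_EFF_ADDR in the memory
 * paths above: it tells the effective address calculation that one more
 * immediate byte (the shift count) follows the ModR/M encoding, which is
 * needed to resolve RIP-relative displacements correctly.
 */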
9465
9466
9467/**
9468 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9469 */
9470#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9471 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9472 \
9473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9474 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9475 \
9476 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9477 { \
9478 switch (pVCpu->iem.s.enmEffOpSize) \
9479 { \
9480 case IEMMODE_16BIT: \
9481 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9483 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9484 IEM_MC_ARG(uint16_t, u16Src, 1); \
9485 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9486 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9487 \
9488 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9489 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9490 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9491 IEM_MC_REF_EFLAGS(pEFlags); \
9492 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9493 \
9494 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9495 IEM_MC_END(); \
9496 break; \
9497 \
9498 case IEMMODE_32BIT: \
9499 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9501 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9502 IEM_MC_ARG(uint32_t, u32Src, 1); \
9503 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9504 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9505 \
9506 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9507 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9508 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9509 IEM_MC_REF_EFLAGS(pEFlags); \
9510 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9511 \
9512 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9513 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9514 IEM_MC_END(); \
9515 break; \
9516 \
9517 case IEMMODE_64BIT: \
9518 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9520 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9521 IEM_MC_ARG(uint64_t, u64Src, 1); \
9522 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9523 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9524 \
9525 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9526 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9527 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9528 IEM_MC_REF_EFLAGS(pEFlags); \
9529 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9530 \
9531 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9532 IEM_MC_END(); \
9533 break; \
9534 \
9535 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9536 } \
9537 } \
9538 else \
9539 { \
9540 switch (pVCpu->iem.s.enmEffOpSize) \
9541 { \
9542 case IEMMODE_16BIT: \
9543 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9544 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9545 IEM_MC_ARG(uint16_t, u16Src, 1); \
9546 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9548 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9549 \
9550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9552 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9553 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9554 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9555 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9556 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9557 \
9558 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9559 IEM_MC_COMMIT_EFLAGS(EFlags); \
9560 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9561 IEM_MC_END(); \
9562 break; \
9563 \
9564 case IEMMODE_32BIT: \
9565 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9566 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9567 IEM_MC_ARG(uint32_t, u32Src, 1); \
9568 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9570 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9571 \
9572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9574 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9575 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9576 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9577 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9578 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9579 \
9580 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9581 IEM_MC_COMMIT_EFLAGS(EFlags); \
9582 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9583 IEM_MC_END(); \
9584 break; \
9585 \
9586 case IEMMODE_64BIT: \
9587 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9588 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9589 IEM_MC_ARG(uint64_t, u64Src, 1); \
9590 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9592 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9593 \
9594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9596 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9597 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9598 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9599 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9600 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9601 \
9602 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9603 IEM_MC_COMMIT_EFLAGS(EFlags); \
9604 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9605 IEM_MC_END(); \
9606 break; \
9607 \
9608 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9609 } \
9610 } (void)0
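
/*
 * For the CL forms the shift count is fetched from CL at execution time;
 * masking the count (to 5 or 6 bits) and the count==0 no-change case are
 * presumably left to the pfnNormalUxx workers, as no masking is done here.
 */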
9611
9612
9613/**
9614 * @opcode 0xa4
9615 * @opflclass shift_count
9616 */
9617FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9618{
9619 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9620 IEMOP_HLP_MIN_386();
9621    IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9622}
9623
9624
9625/**
9626 * @opcode 0xa5
9627 * @opflclass shift_count
9628 */
9629FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9630{
9631 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9632 IEMOP_HLP_MIN_386();
9633 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9634}
9635
9636
9637/** Opcode 0x0f 0xa8. */
9638FNIEMOP_DEF(iemOp_push_gs)
9639{
9640 IEMOP_MNEMONIC(push_gs, "push gs");
9641 IEMOP_HLP_MIN_386();
9642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9643 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9644}
9645
9646
9647/** Opcode 0x0f 0xa9. */
9648FNIEMOP_DEF(iemOp_pop_gs)
9649{
9650 IEMOP_MNEMONIC(pop_gs, "pop gs");
9651 IEMOP_HLP_MIN_386();
9652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9654 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9655 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9656 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9657 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9658 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9659 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9660 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9661}
9662
9663
9664/** Opcode 0x0f 0xaa. */
9665FNIEMOP_DEF(iemOp_rsm)
9666{
9667 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9668 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9670 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9671 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9672 iemCImpl_rsm);
9673}
9674
9675
9676
9677/**
9678 * @opcode 0xab
9679 * @oppfx n/a
9680 * @opflclass bitmap
9681 */
9682FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9683{
9684 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9685 IEMOP_HLP_MIN_386();
9686 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9687 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9688}
9689
9690
9691/**
9692 * @opcode 0xac
9693 * @opflclass shift_count
9694 */
9695FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9696{
9697 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9698 IEMOP_HLP_MIN_386();
9699    IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9700}
9701
9702
9703/**
9704 * @opcode 0xad
9705 * @opflclass shift_count
9706 */
9707FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9708{
9709 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9710 IEMOP_HLP_MIN_386();
9711 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9712}
9713
9714
9715/** Opcode 0x0f 0xae mem/0. */
9716FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9717{
9718 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9719 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9720 IEMOP_RAISE_INVALID_OPCODE_RET();
9721
9722 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9723 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9726 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9727 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9728 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9729 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9730 IEM_MC_END();
9731}
9732
9733
9734/** Opcode 0x0f 0xae mem/1. */
9735FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9736{
9737 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9738 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9739 IEMOP_RAISE_INVALID_OPCODE_RET();
9740
9741 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9742 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9745 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9746 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9747 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9748 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9749 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9750 IEM_MC_END();
9751}
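

/*
 * Unlike the save path, fxrstor (and xrstor below) must flush the
 * recompiler's shadow copies of FCW, FSW and MXCSR (the second argument
 * to IEM_MC_CALL_CIMPL_3), since the C implementation overwrites them
 * from the guest memory image.
 */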
9752
9753
9754/**
9755 * @opmaps grp15
9756 * @opcode !11/2
9757 * @oppfx none
9758 * @opcpuid sse
9759 * @opgroup og_sse_mxcsrsm
9760 * @opxcpttype 5
9761 * @optest op1=0 -> mxcsr=0
9762 * @optest op1=0x2083 -> mxcsr=0x2083
9763 * @optest op1=0xfffffffe -> value.xcpt=0xd
9764 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9765 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9766 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9767 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9768 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9769 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9770 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9771 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9772 */
9773FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9774{
9775 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9776 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9777 IEMOP_RAISE_INVALID_OPCODE_RET();
9778
9779 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9780 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9783 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9784 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9785 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9786 IEM_MC_END();
9787}
9788
9789
9790/**
9791 * @opmaps grp15
9792 * @opcode !11/3
9793 * @oppfx none
9794 * @opcpuid sse
9795 * @opgroup og_sse_mxcsrsm
9796 * @opxcpttype 5
9797 * @optest mxcsr=0 -> op1=0
9798 * @optest mxcsr=0x2083 -> op1=0x2083
9799 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9800 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9801 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9802 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9803 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9804 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9805 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9806 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9807 */
9808FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9809{
9810 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9811 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9812 IEMOP_RAISE_INVALID_OPCODE_RET();
9813
9814 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9815 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9818 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9819 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9820 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9821 IEM_MC_END();
9822}
9823
9824
9825/**
9826 * @opmaps grp15
9827 * @opcode !11/4
9828 * @oppfx none
9829 * @opcpuid xsave
9830 * @opgroup og_system
9831 * @opxcpttype none
9832 */
9833FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9834{
9835 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9836 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9837 IEMOP_RAISE_INVALID_OPCODE_RET();
9838
9839 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9840 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9843 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9844 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9845 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9846 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9847 IEM_MC_END();
9848}
9849
9850
9851/**
9852 * @opmaps grp15
9853 * @opcode !11/5
9854 * @oppfx none
9855 * @opcpuid xsave
9856 * @opgroup og_system
9857 * @opxcpttype none
9858 */
9859FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9860{
9861 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9862 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9863 IEMOP_RAISE_INVALID_OPCODE_RET();
9864
9865 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9866 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9869    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9870 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9871 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9872 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9873 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9874 IEM_MC_END();
9875}
9876
9877/** Opcode 0x0f 0xae mem/6. */
9878FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9879
9880/**
9881 * @opmaps grp15
9882 * @opcode !11/7
9883 * @oppfx none
9884 * @opcpuid clfsh
9885 * @opgroup og_cachectl
9886 * @optest op1=1 ->
9887 */
9888FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9889{
9890 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9891 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9892 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9893
9894 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9895 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9898 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9899 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9900 IEM_MC_END();
9901}
9902
9903/**
9904 * @opmaps grp15
9905 * @opcode !11/7
9906 * @oppfx 0x66
9907 * @opcpuid clflushopt
9908 * @opgroup og_cachectl
9909 * @optest op1=1 ->
9910 */
9911FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9912{
9913 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9914 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9915 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9916
9917 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9918 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9921 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9922 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9923 IEM_MC_END();
9924}
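

/*
 * The three fence encodings below call the host fence directly when the
 * host has SSE2 (always the case for ARM64 builds, where the wrappers
 * provide an equivalent barrier); otherwise they fall back to
 * iemAImpl_alt_mem_fence, which presumably approximates a full fence with
 * a locked memory operation on pre-SSE2 hosts.
 */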
9925
9926
9927/** Opcode 0x0f 0xae 11b/5. */
9928FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9929{
9930 RT_NOREF_PV(bRm);
9931 IEMOP_MNEMONIC(lfence, "lfence");
9932 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9934#ifdef RT_ARCH_ARM64
9935 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9936#else
9937 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9938 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9939 else
9940 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9941#endif
9942 IEM_MC_ADVANCE_RIP_AND_FINISH();
9943 IEM_MC_END();
9944}
9945
9946
9947/** Opcode 0x0f 0xae 11b/6. */
9948FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9949{
9950 RT_NOREF_PV(bRm);
9951 IEMOP_MNEMONIC(mfence, "mfence");
9952 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9954#ifdef RT_ARCH_ARM64
9955 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9956#else
9957 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9958 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9959 else
9960 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9961#endif
9962 IEM_MC_ADVANCE_RIP_AND_FINISH();
9963 IEM_MC_END();
9964}
9965
9966
9967/** Opcode 0x0f 0xae 11b/7. */
9968FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9969{
9970 RT_NOREF_PV(bRm);
9971 IEMOP_MNEMONIC(sfence, "sfence");
9972 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9974#ifdef RT_ARCH_ARM64
9975 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9976#else
9977 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9978 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9979 else
9980 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9981#endif
9982 IEM_MC_ADVANCE_RIP_AND_FINISH();
9983 IEM_MC_END();
9984}
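

/*
 * RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE: the operand size split below only
 * reflects REX.W; both paths remain 64-bit-mode-only, since
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is expected to raise #UD outside long
 * mode or when CR4.FSGSBASE is clear.
 */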
9985
9986
9987/** Opcode 0xf3 0x0f 0xae 11b/0. */
9988FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9989{
9990 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9991 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9992 {
9993 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9995 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9996 IEM_MC_LOCAL(uint64_t, u64Dst);
9997 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9998 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9999 IEM_MC_ADVANCE_RIP_AND_FINISH();
10000 IEM_MC_END();
10001 }
10002 else
10003 {
10004 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10006 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10007 IEM_MC_LOCAL(uint32_t, u32Dst);
10008 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10010 IEM_MC_ADVANCE_RIP_AND_FINISH();
10011 IEM_MC_END();
10012 }
10013}
10014
10015
10016/** Opcode 0xf3 0x0f 0xae 11b/1. */
10017FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10018{
10019 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10020 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10021 {
10022 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10024 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10025 IEM_MC_LOCAL(uint64_t, u64Dst);
10026 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10027 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10028 IEM_MC_ADVANCE_RIP_AND_FINISH();
10029 IEM_MC_END();
10030 }
10031 else
10032 {
10033 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10035 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10036 IEM_MC_LOCAL(uint32_t, u32Dst);
10037 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10038 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10039 IEM_MC_ADVANCE_RIP_AND_FINISH();
10040 IEM_MC_END();
10041 }
10042}
10043
10044
10045/** Opcode 0xf3 0x0f 0xae 11b/2. */
10046FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10047{
10048 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10049 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10050 {
10051 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10053 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10054 IEM_MC_LOCAL(uint64_t, u64Dst);
10055 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10056 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10057 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10058 IEM_MC_ADVANCE_RIP_AND_FINISH();
10059 IEM_MC_END();
10060 }
10061 else
10062 {
10063 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10065 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10066 IEM_MC_LOCAL(uint32_t, u32Dst);
10067 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10068 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10069 IEM_MC_ADVANCE_RIP_AND_FINISH();
10070 IEM_MC_END();
10071 }
10072}
10073
10074
10075/** Opcode 0xf3 0x0f 0xae 11b/3. */
10076FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10077{
10078 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10079 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10080 {
10081 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10083 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10084 IEM_MC_LOCAL(uint64_t, u64Dst);
10085 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10086 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10087 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10088 IEM_MC_ADVANCE_RIP_AND_FINISH();
10089 IEM_MC_END();
10090 }
10091 else
10092 {
10093 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10095 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10096 IEM_MC_LOCAL(uint32_t, u32Dst);
10097 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10098 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10099 IEM_MC_ADVANCE_RIP_AND_FINISH();
10100 IEM_MC_END();
10101 }
10102}
10103
10104
10105/**
10106 * Group 15 jump table for register variant.
10107 */
10108IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10109{ /* pfx: none, 066h, 0f3h, 0f2h */
10110 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10111 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10112 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10113 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10114 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10115 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10116 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10117 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10118};
10119AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10120
10121
10122/**
10123 * Group 15 jump table for memory variant.
10124 */
10125IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10126{ /* pfx: none, 066h, 0f3h, 0f2h */
10127 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10128 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10130 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10131 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10132 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10134 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10135};
10136AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
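

/*
 * Dispatch sketch: both tables are indexed by (reg << 2) + prefix, where
 * prefix is 0=none, 1=66h, 2=F3h, 3=F2h (pVCpu->iem.s.idxPrefix).  E.g.
 * 'F3 0F AE /0' in register mode selects entry 0*4 + 2, i.e.
 * iemOp_Grp15_rdfsbase.
 */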
10137
10138
10139/** Opcode 0x0f 0xae. */
10140FNIEMOP_DEF(iemOp_Grp15)
10141{
10142    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
10143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10144 if (IEM_IS_MODRM_REG_MODE(bRm))
10145 /* register, register */
10146 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10147 + pVCpu->iem.s.idxPrefix], bRm);
10148 /* memory, register */
10149 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10150 + pVCpu->iem.s.idxPrefix], bRm);
10151}
10152
10153
10154/**
10155 * @opcode 0xaf
10156 * @opflclass multiply
10157 */
10158FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10159{
10160 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10161 IEMOP_HLP_MIN_386();
10162 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10163 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10165 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10166}
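
/*
 * Two-operand IMUL only defines CF and OF (set when the signed product
 * does not fit the destination); SF, ZF, AF and PF are architecturally
 * undefined, which is what the IEMOP_VERIFICATION_UNDEFINED_EFLAGS mask
 * above expresses.
 */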
10167
10168
10169/**
10170 * @opcode 0xb0
10171 * @opflclass arithmetic
10172 */
10173FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10174{
10175 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10176 IEMOP_HLP_MIN_486();
10177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10178
10179 if (IEM_IS_MODRM_REG_MODE(bRm))
10180 {
10181 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10183 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10184 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10185 IEM_MC_ARG(uint8_t, u8Src, 2);
10186 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10187
10188 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10189 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10190 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10191 IEM_MC_REF_EFLAGS(pEFlags);
10192 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10193
10194 IEM_MC_ADVANCE_RIP_AND_FINISH();
10195 IEM_MC_END();
10196 }
10197 else
10198 {
10199#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10200 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10203 IEMOP_HLP_DONE_DECODING(); \
10204 \
10205 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10206 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10207 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10208 \
10209 IEM_MC_ARG(uint8_t, u8Src, 2); \
10210 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10211 \
10212 IEM_MC_LOCAL(uint8_t, u8Al); \
10213 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10214 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10215 \
10216 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10217 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10218 \
10219 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10220 IEM_MC_COMMIT_EFLAGS(EFlags); \
10221 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10223 IEM_MC_END()
10224
10225 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10226 {
10227 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10228 }
10229 else
10230 {
10231 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10232 }
10233 }
10234}
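
/*
 * CMPXCHG worker contract, as a sketch (the other arithmetic flags are
 * set as for CMP):
 *
 *      if (*puDst == *puAccumulator) { *puDst = uSrc;            EFLAGS.ZF = 1; }
 *      else                          { *puAccumulator = *puDst;  EFLAGS.ZF = 0; }
 *
 * The 32-bit variants of cmpxchg Ev,Gv below make the register write-back
 * conditional on ZF so that only the operand actually written gets its
 * high dword zeroed.
 */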
10235
10236/**
10237 * @opcode 0xb1
10238 * @opflclass arithmetic
10239 */
10240FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10241{
10242 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10243 IEMOP_HLP_MIN_486();
10244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10245
10246 if (IEM_IS_MODRM_REG_MODE(bRm))
10247 {
10248 switch (pVCpu->iem.s.enmEffOpSize)
10249 {
10250 case IEMMODE_16BIT:
10251 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10253 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10254 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10255 IEM_MC_ARG(uint16_t, u16Src, 2);
10256 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10257
10258 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10259 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10260 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10261 IEM_MC_REF_EFLAGS(pEFlags);
10262 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10263
10264 IEM_MC_ADVANCE_RIP_AND_FINISH();
10265 IEM_MC_END();
10266 break;
10267
10268 case IEMMODE_32BIT:
10269 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10272 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10273 IEM_MC_ARG(uint32_t, u32Src, 2);
10274 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10275
10276 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10277 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10278 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10279 IEM_MC_REF_EFLAGS(pEFlags);
10280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10281
10282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10283 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10284 } IEM_MC_ELSE() {
10285 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10286 } IEM_MC_ENDIF();
10287
10288 IEM_MC_ADVANCE_RIP_AND_FINISH();
10289 IEM_MC_END();
10290 break;
10291
10292 case IEMMODE_64BIT:
10293 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10296 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10297 IEM_MC_ARG(uint64_t, u64Src, 2);
10298 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10299
10300 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10301 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10302 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10303 IEM_MC_REF_EFLAGS(pEFlags);
10304 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10305
10306 IEM_MC_ADVANCE_RIP_AND_FINISH();
10307 IEM_MC_END();
10308 break;
10309
10310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10311 }
10312 }
10313 else
10314 {
10315#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10316 do { \
10317 switch (pVCpu->iem.s.enmEffOpSize) \
10318 { \
10319 case IEMMODE_16BIT: \
10320 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10321 \
10322 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10325 IEMOP_HLP_DONE_DECODING(); \
10326 \
10327 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10328 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10329 \
10330 IEM_MC_ARG(uint16_t, u16Src, 2); \
10331 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10332 \
10333 IEM_MC_LOCAL(uint16_t, u16Ax); \
10334 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10335 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10336 \
10337 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10338 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10339 \
10340 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10341 IEM_MC_COMMIT_EFLAGS(EFlags); \
10342 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10343 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10344 IEM_MC_END(); \
10345 break; \
10346 \
10347 case IEMMODE_32BIT: \
10348 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10351 IEMOP_HLP_DONE_DECODING(); \
10352 \
10353 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10354 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10355 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10356 \
10357 IEM_MC_ARG(uint32_t, u32Src, 2); \
10358 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10359 \
10360 IEM_MC_LOCAL(uint32_t, u32Eax); \
10361 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10362 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10363 \
10364 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10365 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10366 \
10367 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10368 IEM_MC_COMMIT_EFLAGS(EFlags); \
10369 \
10370 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10371 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10372 } IEM_MC_ENDIF(); \
10373 \
10374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10375 IEM_MC_END(); \
10376 break; \
10377 \
10378 case IEMMODE_64BIT: \
10379 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10382 IEMOP_HLP_DONE_DECODING(); \
10383 \
10384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10385 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10386 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10387 \
10388 IEM_MC_ARG(uint64_t, u64Src, 2); \
10389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10390 \
10391 IEM_MC_LOCAL(uint64_t, u64Rax); \
10392 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10393 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10394 \
10395 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10396 \
10397 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10398 \
10399 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10400 IEM_MC_COMMIT_EFLAGS(EFlags); \
10401 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10402 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10403 IEM_MC_END(); \
10404 break; \
10405 \
10406 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10407 } \
10408 } while (0)
10409
10410 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10411 {
10412 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10413 }
10414 else
10415 {
10416 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10417 }
10418 }
10419}
10420
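/*
 * Aside - a minimal C model (illustrative only, not part of the build) of the
 * CMPXCHG Ev,Gv semantics implemented above: on match (ZF=1) the destination
 * receives the source operand, otherwise (ZF=0) the accumulator receives the
 * old destination value.  This asymmetry is why the 32-bit register variant
 * clears the high dword of either the r/m register or RAX depending on ZF.
 * The function name and shape are ours, not IEM API.
 */
#if 0
static bool ExampleCmpXchgU64(uint64_t *puDst, uint64_t *puRax, uint64_t uSrc)
{
    if (*puDst == *puRax)
    {
        *puDst = uSrc;          /* ZF=1: destination <- source. */
        return true;
    }
    *puRax = *puDst;            /* ZF=0: accumulator <- old destination. */
    return false;
}
#endif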
10421
10422/** Opcode 0x0f 0xb2. */
10423FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10424{
10425 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10426 IEMOP_HLP_MIN_386();
10427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10428 if (IEM_IS_MODRM_REG_MODE(bRm))
10429 IEMOP_RAISE_INVALID_OPCODE_RET();
10430 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10431}
10432
10433
10434/**
10435 * @opcode 0xb3
10436 * @oppfx n/a
10437 * @opflclass bitmap
10438 */
10439FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10440{
10441 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10442 IEMOP_HLP_MIN_386();
10443 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10444 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10445}
10446
10447
10448/** Opcode 0x0f 0xb4. */
10449FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10450{
10451 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10452 IEMOP_HLP_MIN_386();
10453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10454 if (IEM_IS_MODRM_REG_MODE(bRm))
10455 IEMOP_RAISE_INVALID_OPCODE_RET();
10456 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10457}
10458
10459
10460/** Opcode 0x0f 0xb5. */
10461FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10462{
10463 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10464 IEMOP_HLP_MIN_386();
10465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10466 if (IEM_IS_MODRM_REG_MODE(bRm))
10467 IEMOP_RAISE_INVALID_OPCODE_RET();
10468 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10469}
10470
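/*
 * Aside (illustrative only, not part of the build): the Mp operand consumed
 * by the lss/lfs/lgs worker above is a far pointer with the offset at the
 * lower addresses and the 16-bit selector following it.  For a 32-bit
 * operand size the memory layout corresponds to this hypothetical struct:
 */
#if 0
typedef struct EXAMPLEFARPTR32
{
    uint32_t off;               /* bytes 0..3: offset, loaded into Gv. */
    uint16_t uSel;              /* bytes 4..5: selector, loaded into SS/FS/GS. */
} EXAMPLEFARPTR32;
#endif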
10471
10472/** Opcode 0x0f 0xb6. */
10473FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10474{
10475 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10476 IEMOP_HLP_MIN_386();
10477
10478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10479
10480 /*
10481 * If rm is denoting a register, no more instruction bytes.
10482 */
10483 if (IEM_IS_MODRM_REG_MODE(bRm))
10484 {
10485 switch (pVCpu->iem.s.enmEffOpSize)
10486 {
10487 case IEMMODE_16BIT:
10488 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490 IEM_MC_LOCAL(uint16_t, u16Value);
10491 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10492 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10493 IEM_MC_ADVANCE_RIP_AND_FINISH();
10494 IEM_MC_END();
10495 break;
10496
10497 case IEMMODE_32BIT:
10498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10500 IEM_MC_LOCAL(uint32_t, u32Value);
10501 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10502 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10503 IEM_MC_ADVANCE_RIP_AND_FINISH();
10504 IEM_MC_END();
10505 break;
10506
10507 case IEMMODE_64BIT:
10508 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10510 IEM_MC_LOCAL(uint64_t, u64Value);
10511 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10512 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10513 IEM_MC_ADVANCE_RIP_AND_FINISH();
10514 IEM_MC_END();
10515 break;
10516
10517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10518 }
10519 }
10520 else
10521 {
10522 /*
10523 * We're loading a register from memory.
10524 */
10525 switch (pVCpu->iem.s.enmEffOpSize)
10526 {
10527 case IEMMODE_16BIT:
10528 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10529 IEM_MC_LOCAL(uint16_t, u16Value);
10530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10533 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10534 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10535 IEM_MC_ADVANCE_RIP_AND_FINISH();
10536 IEM_MC_END();
10537 break;
10538
10539 case IEMMODE_32BIT:
10540 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10541 IEM_MC_LOCAL(uint32_t, u32Value);
10542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10545 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10546 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10547 IEM_MC_ADVANCE_RIP_AND_FINISH();
10548 IEM_MC_END();
10549 break;
10550
10551 case IEMMODE_64BIT:
10552 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10553 IEM_MC_LOCAL(uint64_t, u64Value);
10554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10557 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10558 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10559 IEM_MC_ADVANCE_RIP_AND_FINISH();
10560 IEM_MC_END();
10561 break;
10562
10563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10564 }
10565 }
10566}
10567
10568
10569/** Opcode 0x0f 0xb7. */
10570FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10571{
10572 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10573 IEMOP_HLP_MIN_386();
10574
10575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10576
10577 /** @todo Not entirely sure how the operand size prefix is handled here,
10578 * assuming that it will be ignored. Would be nice to have a few
10579 * tests for this. */
10580
10581 /** @todo There should be no difference in the behaviour whether REX.W is
10582 * present or not... */
10583
10584 /*
10585 * If rm is denoting a register, no more instruction bytes.
10586 */
10587 if (IEM_IS_MODRM_REG_MODE(bRm))
10588 {
10589 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10590 {
10591 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10593 IEM_MC_LOCAL(uint32_t, u32Value);
10594 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10595 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10596 IEM_MC_ADVANCE_RIP_AND_FINISH();
10597 IEM_MC_END();
10598 }
10599 else
10600 {
10601 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10603 IEM_MC_LOCAL(uint64_t, u64Value);
10604 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10605 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10606 IEM_MC_ADVANCE_RIP_AND_FINISH();
10607 IEM_MC_END();
10608 }
10609 }
10610 else
10611 {
10612 /*
10613 * We're loading a register from memory.
10614 */
10615 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10616 {
10617 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10618 IEM_MC_LOCAL(uint32_t, u32Value);
10619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10622 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10623 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10624 IEM_MC_ADVANCE_RIP_AND_FINISH();
10625 IEM_MC_END();
10626 }
10627 else
10628 {
10629 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10630 IEM_MC_LOCAL(uint64_t, u64Value);
10631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10634 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10635 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10636 IEM_MC_ADVANCE_RIP_AND_FINISH();
10637 IEM_MC_END();
10638 }
10639 }
10640}
10641
10642
10643/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10644FNIEMOP_UD_STUB(iemOp_jmpe);
10645
10646
10647/**
10648 * @opcode 0xb8
10649 * @oppfx 0xf3
10650 * @opflmodify cf,pf,af,zf,sf,of
10651 * @opflclear cf,pf,af,sf,of
10652 */
10653FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10654{
10655 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10656 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10657 return iemOp_InvalidNeedRM(pVCpu);
10658#ifndef TST_IEM_CHECK_MC
10659# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10660 static const IEMOPBINSIZES s_Native =
10661 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10662# endif
10663 static const IEMOPBINSIZES s_Fallback =
10664 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10665#endif
10666 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10668 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10669}
10670
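/*
 * Aside (illustrative only, not part of the build): a portable population
 * count of the kind the _fallback workers could use on hosts without the
 * POPCNT instruction.  The real iemAImpl workers additionally deliver the
 * EFLAGS result (ZF set when the source is zero, CF/PF/AF/SF/OF cleared);
 * this sketch shows just the counting step.
 */
#if 0
static unsigned ExamplePopCntU64(uint64_t uVal)
{
    uVal = uVal - ((uVal >> 1) & UINT64_C(0x5555555555555555));
    uVal = (uVal & UINT64_C(0x3333333333333333)) + ((uVal >> 2) & UINT64_C(0x3333333333333333));
    uVal = (uVal + (uVal >> 4)) & UINT64_C(0x0f0f0f0f0f0f0f0f);
    return (unsigned)((uVal * UINT64_C(0x0101010101010101)) >> 56);
}
#endif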
10671
10672/**
10673 * @opcode 0xb9
10674 * @opinvalid intel-modrm
10675 * @optest ->
10676 */
10677FNIEMOP_DEF(iemOp_Grp10)
10678{
10679 /*
10680 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
10681 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10682 */
10683 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10684 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10685 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10686}
10687
10688
10689/**
10690 * Body for the group 8 bit instructions (bt/bts/btr/btc Ev,Ib).
10691 */
10692#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10693 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10694 \
10695 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10696 { \
10697 /* register destination. */ \
10698 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10699 \
10700 switch (pVCpu->iem.s.enmEffOpSize) \
10701 { \
10702 case IEMMODE_16BIT: \
10703 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10705 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10706 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10707 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10708 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10709 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10710 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10711 \
10712 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10713 IEM_MC_END(); \
10714 break; \
10715 \
10716 case IEMMODE_32BIT: \
10717 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10719 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10720 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10721 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10722 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10723 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10724 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10725 \
10726 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10727 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10728 IEM_MC_END(); \
10729 break; \
10730 \
10731 case IEMMODE_64BIT: \
10732 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10734 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10735 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10736 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10737 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10738 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10739 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10740 \
10741 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10742 IEM_MC_END(); \
10743 break; \
10744 \
10745 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10746 } \
10747 } \
10748 else \
10749 { \
10750 /* memory destination. */ \
10751 /** @todo test negative bit offsets! */ \
10752 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10753 { \
10754 switch (pVCpu->iem.s.enmEffOpSize) \
10755 { \
10756 case IEMMODE_16BIT: \
10757 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10760 \
10761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10762 IEMOP_HLP_DONE_DECODING(); \
10763 \
10764 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10765 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10766 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10767 \
10768 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10769 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10770 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10771 \
10772 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10773 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10774 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10775 IEM_MC_END(); \
10776 break; \
10777 \
10778 case IEMMODE_32BIT: \
10779 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10782 \
10783 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10784 IEMOP_HLP_DONE_DECODING(); \
10785 \
10786 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10787 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10788 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10789 \
10790 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10791 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10792 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10793 \
10794 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10795 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10796 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10797 IEM_MC_END(); \
10798 break; \
10799 \
10800 case IEMMODE_64BIT: \
10801 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10804 \
10805 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10806 IEMOP_HLP_DONE_DECODING(); \
10807 \
10808 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10809 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10810 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10811 \
10812 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10813 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10814 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10815 \
10816 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10817 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10818 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10819 IEM_MC_END(); \
10820 break; \
10821 \
10822 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10823 } \
10824 } \
10825 else \
10826 { \
10827 (void)0
10828/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10829#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10830 switch (pVCpu->iem.s.enmEffOpSize) \
10831 { \
10832 case IEMMODE_16BIT: \
10833 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10836 \
10837 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10838 IEMOP_HLP_DONE_DECODING(); \
10839 \
10840 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10841 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10842 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10843 \
10844 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10845 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10846 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10847 \
10848 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10849 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10850 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10851 IEM_MC_END(); \
10852 break; \
10853 \
10854 case IEMMODE_32BIT: \
10855 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10858 \
10859 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10860 IEMOP_HLP_DONE_DECODING(); \
10861 \
10862 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10863 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10864 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10865 \
10866 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10867 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10868 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10869 \
10870 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10871 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10872 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10873 IEM_MC_END(); \
10874 break; \
10875 \
10876 case IEMMODE_64BIT: \
10877 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10880 \
10881 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10882 IEMOP_HLP_DONE_DECODING(); \
10883 \
10884 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10885 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10886 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10887 \
10888 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10889 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10890 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10891 \
10892 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10893 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10894 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10895 IEM_MC_END(); \
10896 break; \
10897 \
10898 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10899 } \
10900 } \
10901 } \
10902 (void)0
10903
10904/* Read-only version (bt) */
10905#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10907 \
10908 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10909 { \
10910 /* register destination. */ \
10911 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10912 \
10913 switch (pVCpu->iem.s.enmEffOpSize) \
10914 { \
10915 case IEMMODE_16BIT: \
10916 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10918 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10919 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10920 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10921 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10922 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10923 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10924 \
10925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10926 IEM_MC_END(); \
10927 break; \
10928 \
10929 case IEMMODE_32BIT: \
10930 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10932 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10933 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10934 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10935 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10936 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10937 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10938 \
10939 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10940 IEM_MC_END(); \
10941 break; \
10942 \
10943 case IEMMODE_64BIT: \
10944 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10946 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10947 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10948 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10949 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10950 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10951 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10952 \
10953 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10954 IEM_MC_END(); \
10955 break; \
10956 \
10957 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10958 } \
10959 } \
10960 else \
10961 { \
10962 /* memory destination. */ \
10963 /** @todo test negative bit offsets! */ \
10964 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10965 { \
10966 switch (pVCpu->iem.s.enmEffOpSize) \
10967 { \
10968 case IEMMODE_16BIT: \
10969 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10972 \
10973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10974 IEMOP_HLP_DONE_DECODING(); \
10975 \
10976 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10977 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10978 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10979 \
10980 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10981 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10982 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10983 \
10984 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10985 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10987 IEM_MC_END(); \
10988 break; \
10989 \
10990 case IEMMODE_32BIT: \
10991 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10994 \
10995 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10996 IEMOP_HLP_DONE_DECODING(); \
10997 \
10998 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10999 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
11000 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11001 \
11002 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11003 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
11004 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11005 \
11006 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11007 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11008 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11009 IEM_MC_END(); \
11010 break; \
11011 \
11012 case IEMMODE_64BIT: \
11013 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11016 \
11017 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11018 IEMOP_HLP_DONE_DECODING(); \
11019 \
11020 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11021 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
11022 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11023 \
11024 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11025 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
11026 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11027 \
11028 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11029 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11030 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11031 IEM_MC_END(); \
11032 break; \
11033 \
11034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11035 } \
11036 } \
11037 else \
11038 { \
11039 IEMOP_HLP_DONE_DECODING(); \
11040 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11041 } \
11042 } \
11043 (void)0
11044
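/*
 * Aside (illustrative only, not part of the build): the immediate forms above
 * reduce the bit offset modulo the operand width - that is the bImm & 0x0f /
 * 0x1f / 0x3f masking - and CF receives the selected bit.  For the plain BT
 * case the operation boils down to:
 */
#if 0
static bool ExampleBtU32(uint32_t uDst, uint8_t bImm)
{
    return (uDst >> (bImm & 0x1f)) & 1;     /* The result lands in EFLAGS.CF. */
}
#endif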
11045
11046/**
11047 * @opmaps grp8
11048 * @opcode /4
11049 * @oppfx n/a
11050 * @opflclass bitmap
11051 */
11052FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11053{
11054 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11055 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11056}
11057
11058
11059/**
11060 * @opmaps grp8
11061 * @opcode /5
11062 * @oppfx n/a
11063 * @opflclass bitmap
11064 */
11065FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11066{
11067 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11068 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11069 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11070}
11071
11072
11073/**
11074 * @opmaps grp8
11075 * @opcode /6
11076 * @oppfx n/a
11077 * @opflclass bitmap
11078 */
11079FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11080{
11081 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11082 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11083 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11084}
11085
11086
11087/**
11088 * @opmaps grp8
11089 * @opcode /7
11090 * @oppfx n/a
11091 * @opflclass bitmap
11092 */
11093FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11094{
11095 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11096 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11097 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11098}
11099
11100
11101/** Opcode 0x0f 0xba. */
11102FNIEMOP_DEF(iemOp_Grp8)
11103{
11104 IEMOP_HLP_MIN_386();
11105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11106 switch (IEM_GET_MODRM_REG_8(bRm))
11107 {
11108 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11109 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11110 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11111 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11112
11113 case 0: case 1: case 2: case 3:
11114 /* Both AMD and Intel want full modr/m decoding and imm8. */
11115 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11116
11117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11118 }
11119}
11120
11121
11122/**
11123 * @opcode 0xbb
11124 * @oppfx n/a
11125 * @opflclass bitmap
11126 */
11127FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11128{
11129 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11130 IEMOP_HLP_MIN_386();
11131 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11132 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11133}
11134
11135
11136/**
11137 * Body for BSF and BSR instructions.
11138 *
11139 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11140 * the destination register, which means that for 32-bit operations the high
11141 * bits must be left alone.
11142 *
11143 * @param pImpl Pointer to the instruction implementation (assembly).
11144 */
11145#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11147 \
11148 /* \
11149 * If rm is denoting a register, no more instruction bytes. \
11150 */ \
11151 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11152 { \
11153 switch (pVCpu->iem.s.enmEffOpSize) \
11154 { \
11155 case IEMMODE_16BIT: \
11156 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11158 \
11159 IEM_MC_ARG(uint16_t, u16Src, 2); \
11160 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11161 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11162 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11163 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11164 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11165 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11166 \
11167 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11168 IEM_MC_END(); \
11169 break; \
11170 \
11171 case IEMMODE_32BIT: \
11172 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11174 \
11175 IEM_MC_ARG(uint32_t, u32Src, 2); \
11176 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11177 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11178 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11179 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11180 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11181 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11182 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11183 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11184 } IEM_MC_ENDIF(); \
11185 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11186 IEM_MC_END(); \
11187 break; \
11188 \
11189 case IEMMODE_64BIT: \
11190 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11192 \
11193 IEM_MC_ARG(uint64_t, u64Src, 2); \
11194 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11195 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11196 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11197 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11198 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11199 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11200 \
11201 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11202 IEM_MC_END(); \
11203 break; \
11204 \
11205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11206 } \
11207 } \
11208 else \
11209 { \
11210 /* \
11211 * We're accessing memory. \
11212 */ \
11213 switch (pVCpu->iem.s.enmEffOpSize) \
11214 { \
11215 case IEMMODE_16BIT: \
11216 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11220 \
11221 IEM_MC_ARG(uint16_t, u16Src, 2); \
11222 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11223 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11224 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11225 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11226 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11227 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11228 \
11229 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11230 IEM_MC_END(); \
11231 break; \
11232 \
11233 case IEMMODE_32BIT: \
11234 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11238 \
11239 IEM_MC_ARG(uint32_t, u32Src, 2); \
11240 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11241 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11242 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11243 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11244 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11245 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11246 \
11247 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11248 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11249 } IEM_MC_ENDIF(); \
11250 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11251 IEM_MC_END(); \
11252 break; \
11253 \
11254 case IEMMODE_64BIT: \
11255 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11259 \
11260 IEM_MC_ARG(uint64_t, u64Src, 2); \
11261 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11262 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11263 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11264 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11265 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11266 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11267 \
11268 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11269 IEM_MC_END(); \
11270 break; \
11271 \
11272 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11273 } \
11274 } (void)0
11275
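/*
 * Aside (illustrative only, not part of the build): a sketch of why the macro
 * above clears the high dword only when ZF is clear.  With a zero source,
 * BSF/BSR set ZF and leave the destination register unmodified (on the CPUs
 * IEM emulates), so the implicit zero extension of a 32-bit register write
 * must not happen either.
 */
#if 0
static void ExampleBsfU32(uint32_t *puDst, uint32_t uSrc, bool *pfZF)
{
    if (!uSrc)
        *pfZF = true;                   /* Destination left untouched. */
    else
    {
        unsigned iBit = 0;
        while (!(uSrc & ((uint32_t)1 << iBit)))
            iBit++;                     /* Scan for the least significant set bit. */
        *puDst = iBit;
        *pfZF = false;
    }
}
#endif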
11276
11277/**
11278 * @opcode 0xbc
11279 * @oppfx !0xf3
11280 * @opfltest cf,pf,af,sf,of
11281 * @opflmodify cf,pf,af,zf,sf,of
11282 * @opflundef cf,pf,af,sf,of
11283 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11284 * document them as inputs. Sigh.
11285 */
11286FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11287{
11288 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11289 IEMOP_HLP_MIN_386();
11290 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11291 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11292 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11293}
11294
11295
11296/**
11297 * @opcode 0xbc
11298 * @oppfx 0xf3
11299 * @opfltest pf,af,sf,of
11300 * @opflmodify cf,pf,af,zf,sf,of
11301 * @opflundef pf,af,sf,of
11302 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11303 * document them as inputs. Sigh.
11304 */
11305FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11306{
11307 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11308 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11309 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11310
11311#ifndef TST_IEM_CHECK_MC
11312 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11313 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11314 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11315 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11316 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11317 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11318 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11319 {
11320 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11321 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11322 };
11323#endif
11324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11325 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11326 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11328 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11329}
11330
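/*
 * Aside (illustrative only, not part of the build): unlike BSF, TZCNT is
 * architecturally defined for a zero source - the result is the operand
 * width and CF is set; ZF is set when the result is zero (i.e. bit 0 of the
 * source was set).  A 32-bit sketch:
 */
#if 0
static uint32_t ExampleTzCntU32(uint32_t uSrc, bool *pfCF, bool *pfZF)
{
    uint32_t cBits = 0;
    if (!uSrc)
        cBits = 32;                     /* Zero source: result is the operand width. */
    else
        while (!(uSrc & ((uint32_t)1 << cBits)))
            cBits++;
    *pfCF = cBits == 32;
    *pfZF = cBits == 0;
    return cBits;
}
#endif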
11331
11332/**
11333 * @opcode 0xbd
11334 * @oppfx !0xf3
11335 * @opfltest cf,pf,af,sf,of
11336 * @opflmodify cf,pf,af,zf,sf,of
11337 * @opflundef cf,pf,af,sf,of
11338 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11339 * document them as inputs. Sigh.
11340 */
11341FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11342{
11343 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11344 IEMOP_HLP_MIN_386();
11345 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11346 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11347 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11348}
11349
11350
11351/**
11352 * @opcode 0xbd
11353 * @oppfx 0xf3
11354 * @opfltest pf,af,sf,of
11355 * @opflmodify cf,pf,af,zf,sf,of
11356 * @opflundef pf,af,sf,of
11357 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11358 * document them as inputs. Sigh.
11359 */
11360FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11361{
11362 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11363 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11364 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11365
11366#ifndef TST_IEM_CHECK_MC
11367 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11368 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11369 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11370 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11371 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11372 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11373 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11374 {
11375 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11376 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11377 };
11378#endif
11379 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11380 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11381 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11383 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11384}
11385
11386
11387
11388/** Opcode 0x0f 0xbe. */
11389FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11390{
11391 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11392 IEMOP_HLP_MIN_386();
11393
11394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11395
11396 /*
11397 * If rm is denoting a register, no more instruction bytes.
11398 */
11399 if (IEM_IS_MODRM_REG_MODE(bRm))
11400 {
11401 switch (pVCpu->iem.s.enmEffOpSize)
11402 {
11403 case IEMMODE_16BIT:
11404 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11406 IEM_MC_LOCAL(uint16_t, u16Value);
11407 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11408 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11409 IEM_MC_ADVANCE_RIP_AND_FINISH();
11410 IEM_MC_END();
11411 break;
11412
11413 case IEMMODE_32BIT:
11414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11416 IEM_MC_LOCAL(uint32_t, u32Value);
11417 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11418 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11419 IEM_MC_ADVANCE_RIP_AND_FINISH();
11420 IEM_MC_END();
11421 break;
11422
11423 case IEMMODE_64BIT:
11424 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11426 IEM_MC_LOCAL(uint64_t, u64Value);
11427 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11428 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11429 IEM_MC_ADVANCE_RIP_AND_FINISH();
11430 IEM_MC_END();
11431 break;
11432
11433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11434 }
11435 }
11436 else
11437 {
11438 /*
11439 * We're loading a register from memory.
11440 */
11441 switch (pVCpu->iem.s.enmEffOpSize)
11442 {
11443 case IEMMODE_16BIT:
11444 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11445 IEM_MC_LOCAL(uint16_t, u16Value);
11446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11449 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11450 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11451 IEM_MC_ADVANCE_RIP_AND_FINISH();
11452 IEM_MC_END();
11453 break;
11454
11455 case IEMMODE_32BIT:
11456 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11457 IEM_MC_LOCAL(uint32_t, u32Value);
11458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11461 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11462 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11463 IEM_MC_ADVANCE_RIP_AND_FINISH();
11464 IEM_MC_END();
11465 break;
11466
11467 case IEMMODE_64BIT:
11468 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11469 IEM_MC_LOCAL(uint64_t, u64Value);
11470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11473 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11474 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11475 IEM_MC_ADVANCE_RIP_AND_FINISH();
11476 IEM_MC_END();
11477 break;
11478
11479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11480 }
11481 }
11482}
11483
11484
11485/** Opcode 0x0f 0xbf. */
11486FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11487{
11488 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11489 IEMOP_HLP_MIN_386();
11490
11491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11492
11493 /** @todo Not entirely sure how the operand size prefix is handled here,
11494 * assuming that it will be ignored. Would be nice to have a few
11495 * tests for this. */
11496 /*
11497 * If rm is denoting a register, no more instruction bytes.
11498 */
11499 if (IEM_IS_MODRM_REG_MODE(bRm))
11500 {
11501 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11502 {
11503 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11505 IEM_MC_LOCAL(uint32_t, u32Value);
11506 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11507 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11508 IEM_MC_ADVANCE_RIP_AND_FINISH();
11509 IEM_MC_END();
11510 }
11511 else
11512 {
11513 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11515 IEM_MC_LOCAL(uint64_t, u64Value);
11516 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11517 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11518 IEM_MC_ADVANCE_RIP_AND_FINISH();
11519 IEM_MC_END();
11520 }
11521 }
11522 else
11523 {
11524 /*
11525 * We're loading a register from memory.
11526 */
11527 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11528 {
11529 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11530 IEM_MC_LOCAL(uint32_t, u32Value);
11531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11534 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11535 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11536 IEM_MC_ADVANCE_RIP_AND_FINISH();
11537 IEM_MC_END();
11538 }
11539 else
11540 {
11541 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11542 IEM_MC_LOCAL(uint64_t, u64Value);
11543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11546 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11547 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11548 IEM_MC_ADVANCE_RIP_AND_FINISH();
11549 IEM_MC_END();
11550 }
11551 }
11552}
11553
11554
11555/**
11556 * @opcode 0xc0
11557 * @opflclass arithmetic
11558 */
11559FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11560{
11561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11562 IEMOP_HLP_MIN_486();
11563 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11564
11565 /*
11566 * If rm is denoting a register, no more instruction bytes.
11567 */
11568 if (IEM_IS_MODRM_REG_MODE(bRm))
11569 {
11570 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11572 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11573 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11574 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11575
11576 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11577 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11578 IEM_MC_REF_EFLAGS(pEFlags);
11579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11580
11581 IEM_MC_ADVANCE_RIP_AND_FINISH();
11582 IEM_MC_END();
11583 }
11584 else
11585 {
11586 /*
11587 * We're accessing memory.
11588 */
11589#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11590 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11593 IEMOP_HLP_DONE_DECODING(); \
11594 \
11595 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11596 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11597 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11598 \
11599 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11600 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11601 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11602 \
11603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11604 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11605 \
11606 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11607 IEM_MC_COMMIT_EFLAGS(EFlags); \
11608 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11609 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11610 IEM_MC_END()
11611 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11612 {
11613 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11614 }
11615 else
11616 {
11617 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11618 }
11619 }
11620}
11621
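/*
 * Aside (illustrative only, not part of the build): the XADD exchange-and-add
 * performed by the workers above - the destination becomes the sum and the
 * source register receives the old destination value, which is why the
 * memory form stores u8RegCopy back into the register afterwards.  Flags are
 * set as for ADD.
 */
#if 0
static void ExampleXAddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = (uint8_t)(uOldDst + *puReg);   /* Destination <- dst + src. */
    *puReg = uOldDst;                       /* Source register <- old destination. */
}
#endif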
11622
11623/**
11624 * @opcode 0xc1
11625 * @opflclass arithmetic
11626 */
11627FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11628{
11629 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11630 IEMOP_HLP_MIN_486();
11631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11632
11633 /*
11634 * If rm is denoting a register, no more instruction bytes.
11635 */
11636 if (IEM_IS_MODRM_REG_MODE(bRm))
11637 {
11638 switch (pVCpu->iem.s.enmEffOpSize)
11639 {
11640 case IEMMODE_16BIT:
11641 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11643 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11644 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11645 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11646
11647 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11648 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11649 IEM_MC_REF_EFLAGS(pEFlags);
11650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11651
11652 IEM_MC_ADVANCE_RIP_AND_FINISH();
11653 IEM_MC_END();
11654 break;
11655
11656 case IEMMODE_32BIT:
11657 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11659 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11660 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11661 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11662
11663 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11664 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11665 IEM_MC_REF_EFLAGS(pEFlags);
11666 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11667
11668 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11669 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11670 IEM_MC_ADVANCE_RIP_AND_FINISH();
11671 IEM_MC_END();
11672 break;
11673
11674 case IEMMODE_64BIT:
11675 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11678 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11679 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11680
11681 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11682 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11683 IEM_MC_REF_EFLAGS(pEFlags);
11684 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11685
11686 IEM_MC_ADVANCE_RIP_AND_FINISH();
11687 IEM_MC_END();
11688 break;
11689
11690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11691 }
11692 }
11693 else
11694 {
11695 /*
11696 * We're accessing memory.
11697 */
11698#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11699 do { \
11700 switch (pVCpu->iem.s.enmEffOpSize) \
11701 { \
11702 case IEMMODE_16BIT: \
11703 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11706 IEMOP_HLP_DONE_DECODING(); \
11707 \
11708 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11709 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11710 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11711 \
11712 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11713 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11714 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11715 \
11716 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11717 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11718 \
11719 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11720 IEM_MC_COMMIT_EFLAGS(EFlags); \
11721 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11723 IEM_MC_END(); \
11724 break; \
11725 \
11726 case IEMMODE_32BIT: \
11727 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11730 IEMOP_HLP_DONE_DECODING(); \
11731 \
11732 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11733 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11734 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11735 \
11736 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11737 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11738 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11739 \
11740 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11741 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11742 \
11743 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11744 IEM_MC_COMMIT_EFLAGS(EFlags); \
11745 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11746 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11747 IEM_MC_END(); \
11748 break; \
11749 \
11750 case IEMMODE_64BIT: \
11751 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11754 IEMOP_HLP_DONE_DECODING(); \
11755 \
11756 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11757 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11758 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11759 \
11760 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11761 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11762 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11763 \
11764 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11765 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11766 \
11767 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11768 IEM_MC_COMMIT_EFLAGS(EFlags); \
11769 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11770 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11771 IEM_MC_END(); \
11772 break; \
11773 \
11774 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11775 } \
11776 } while (0)
11777
11778 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11779 {
11780 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11781 }
11782 else
11783 {
11784 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11785 }
11786 }
11787}
11788
11789
11790/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11791FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11792{
11793 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11794
11795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11796 if (IEM_IS_MODRM_REG_MODE(bRm))
11797 {
11798 /*
11799 * XMM, XMM.
11800 */
11801 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11802 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11804 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11805 IEM_MC_LOCAL(X86XMMREG, Dst);
11806 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11807 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11808 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11809 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11810 IEM_MC_PREPARE_SSE_USAGE();
11811 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11812 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11813 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11814
11815 IEM_MC_ADVANCE_RIP_AND_FINISH();
11816 IEM_MC_END();
11817 }
11818 else
11819 {
11820 /*
11821 * XMM, [mem128].
11822 */
11823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11824 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11825 IEM_MC_LOCAL(X86XMMREG, Dst);
11826 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11827 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11829
11830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11831 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11832 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11835 IEM_MC_PREPARE_SSE_USAGE();
11836
11837 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11838 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11839 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11840
11841 IEM_MC_ADVANCE_RIP_AND_FINISH();
11842 IEM_MC_END();
11843 }
11844}
11845
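/*
 * Aside (illustrative only, not part of the build): the cmpps worker compares
 * each of the four single-precision lanes according to the imm8 predicate and
 * writes an all-ones or all-zeroes mask per lane.  For predicate 0 (CMPEQPS)
 * one lane behaves roughly like this; a NaN compares unordered and therefore
 * yields false for EQ (MXCSR exception details omitted):
 */
#if 0
static uint32_t ExampleCmpEqPsLane(float r32Dst, float r32Src)
{
    return r32Dst == r32Src ? UINT32_C(0xffffffff) : UINT32_C(0);
}
#endif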
11846
11847/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11848FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11849{
11850 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11851
11852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11853 if (IEM_IS_MODRM_REG_MODE(bRm))
11854 {
11855 /*
11856 * XMM, XMM.
11857 */
11858 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11859 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11861 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11862 IEM_MC_LOCAL(X86XMMREG, Dst);
11863 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11864 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11865 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11866 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11867 IEM_MC_PREPARE_SSE_USAGE();
11868 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11869 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11870 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11871
11872 IEM_MC_ADVANCE_RIP_AND_FINISH();
11873 IEM_MC_END();
11874 }
11875 else
11876 {
11877 /*
11878 * XMM, [mem128].
11879 */
11880 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11881 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11882 IEM_MC_LOCAL(X86XMMREG, Dst);
11883 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11884 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11886
11887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11888 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11889 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11892 IEM_MC_PREPARE_SSE_USAGE();
11893
11894 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11896 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11897
11898 IEM_MC_ADVANCE_RIP_AND_FINISH();
11899 IEM_MC_END();
11900 }
11901}
11902
11903
11904/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11905FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11906{
11907 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11908
11909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11910 if (IEM_IS_MODRM_REG_MODE(bRm))
11911 {
11912 /*
11913 * XMM32, XMM32.
11914 */
11915 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11916 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11918 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11919 IEM_MC_LOCAL(X86XMMREG, Dst);
11920 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11921 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11922 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11923 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11924 IEM_MC_PREPARE_SSE_USAGE();
11925 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11926 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11927 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11928
11929 IEM_MC_ADVANCE_RIP_AND_FINISH();
11930 IEM_MC_END();
11931 }
11932 else
11933 {
11934 /*
11935 * XMM32, [mem32].
11936 */
11937 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11938 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11939 IEM_MC_LOCAL(X86XMMREG, Dst);
11940 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11941 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11943
11944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11945 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11946 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11948 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11949 IEM_MC_PREPARE_SSE_USAGE();
11950
11951 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11952 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11953 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11954 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11955
11956 IEM_MC_ADVANCE_RIP_AND_FINISH();
11957 IEM_MC_END();
11958 }
11959}
11960
11961
11962/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11963FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11964{
11965 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11966
11967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11968 if (IEM_IS_MODRM_REG_MODE(bRm))
11969 {
11970 /*
11971 * XMM64, XMM64.
11972 */
11973 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11974 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11976 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11977 IEM_MC_LOCAL(X86XMMREG, Dst);
11978 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11979 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11980 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11982 IEM_MC_PREPARE_SSE_USAGE();
11983 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11984 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11985 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11986
11987 IEM_MC_ADVANCE_RIP_AND_FINISH();
11988 IEM_MC_END();
11989 }
11990 else
11991 {
11992 /*
11993 * XMM64, [mem64].
11994 */
11995 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11996 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11997 IEM_MC_LOCAL(X86XMMREG, Dst);
11998 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11999 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
12000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12001
12002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12003 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12004 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12006 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12007 IEM_MC_PREPARE_SSE_USAGE();
12008
12009 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12010 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12011 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12012 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12013
12014 IEM_MC_ADVANCE_RIP_AND_FINISH();
12015 IEM_MC_END();
12016 }
12017}
12018
12019
12020/** Opcode 0x0f 0xc3. */
12021FNIEMOP_DEF(iemOp_movnti_My_Gy)
12022{
12023 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12024
12025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12026
12027 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12028 if (IEM_IS_MODRM_MEM_MODE(bRm))
12029 {
12030 switch (pVCpu->iem.s.enmEffOpSize)
12031 {
12032 case IEMMODE_32BIT:
12033 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
12034 IEM_MC_LOCAL(uint32_t, u32Value);
12035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12036
12037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12039
12040 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12041 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12042 IEM_MC_ADVANCE_RIP_AND_FINISH();
12043 IEM_MC_END();
12044 break;
12045
12046 case IEMMODE_64BIT:
12047 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12048 IEM_MC_LOCAL(uint64_t, u64Value);
12049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12050
12051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12053
12054 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12055 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12056 IEM_MC_ADVANCE_RIP_AND_FINISH();
12057 IEM_MC_END();
12058 break;
12059
12060 case IEMMODE_16BIT:
12061 /** @todo check this form. */
12062 IEMOP_RAISE_INVALID_OPCODE_RET();
12063
12064 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12065 }
12066 }
12067 else
12068 IEMOP_RAISE_INVALID_OPCODE_RET();
12069}
12070
12071
12072/* Opcode 0x66 0x0f 0xc3 - invalid */
12073/* Opcode 0xf3 0x0f 0xc3 - invalid */
12074/* Opcode 0xf2 0x0f 0xc3 - invalid */
12075
12076
12077/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12078FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12079{
12080 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12082 if (IEM_IS_MODRM_REG_MODE(bRm))
12083 {
12084 /*
12085 * Register, register.
12086 */
12087 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12088 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12089 IEM_MC_LOCAL(uint16_t, uValue);
12090
12091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12092 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12093 IEM_MC_PREPARE_FPU_USAGE();
12094 IEM_MC_FPU_TO_MMX_MODE();
12095
12096 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12097 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12098
12099 IEM_MC_ADVANCE_RIP_AND_FINISH();
12100 IEM_MC_END();
12101 }
12102 else
12103 {
12104 /*
12105 * Register, memory.
12106 */
12107 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12109 IEM_MC_LOCAL(uint16_t, uValue);
12110
12111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12112 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12114 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12115 IEM_MC_PREPARE_FPU_USAGE();
12116
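        /* Fetch the memory operand before switching to MMX mode so that a #PF
           on the access leaves the x87/MMX state (FTW included) untouched. */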
12117 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12118 IEM_MC_FPU_TO_MMX_MODE();
12119 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12120
12121 IEM_MC_ADVANCE_RIP_AND_FINISH();
12122 IEM_MC_END();
12123 }
12124}
12125
12126
12127/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12128FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12129{
12130 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12132 if (IEM_IS_MODRM_REG_MODE(bRm))
12133 {
12134 /*
12135 * Register, register.
12136 */
12137 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12138 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12140
12141 IEM_MC_LOCAL(uint16_t, uValue);
12142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12143 IEM_MC_PREPARE_SSE_USAGE();
12144
12145 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12146 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12147 IEM_MC_ADVANCE_RIP_AND_FINISH();
12148 IEM_MC_END();
12149 }
12150 else
12151 {
12152 /*
12153 * Register, memory.
12154 */
12155 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12157 IEM_MC_LOCAL(uint16_t, uValue);
12158
12159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12160 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12162 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12163 IEM_MC_PREPARE_SSE_USAGE();
12164
12165 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12166 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12167 IEM_MC_ADVANCE_RIP_AND_FINISH();
12168 IEM_MC_END();
12169 }
12170}
12171
12172
12173/* Opcode 0xf3 0x0f 0xc4 - invalid */
12174/* Opcode 0xf2 0x0f 0xc4 - invalid */
12175
12176
12177/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12178FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12179{
12180 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12182 if (IEM_IS_MODRM_REG_MODE(bRm))
12183 {
12184 /*
12185 * Greg32, MMX, imm8.
12186 */
12187 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12188 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12190 IEM_MC_LOCAL(uint16_t, uValue);
12191 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12192 IEM_MC_PREPARE_FPU_USAGE();
12193 IEM_MC_FPU_TO_MMX_MODE();
12194 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12195 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12196 IEM_MC_ADVANCE_RIP_AND_FINISH();
12197 IEM_MC_END();
12198 }
12199 /* No memory operand. */
12200 else
12201 IEMOP_RAISE_INVALID_OPCODE_RET();
12202}
12203
12204
12205/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12206FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12207{
12208 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12210 if (IEM_IS_MODRM_REG_MODE(bRm))
12211 {
12212 /*
12213 * Greg32, XMM, imm8.
12214 */
12215 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12216 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12218 IEM_MC_LOCAL(uint16_t, uValue);
12219 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12220 IEM_MC_PREPARE_SSE_USAGE();
12221 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12222 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12223 IEM_MC_ADVANCE_RIP_AND_FINISH();
12224 IEM_MC_END();
12225 }
12226 /* No memory operand. */
12227 else
12228 IEMOP_RAISE_INVALID_OPCODE_RET();
12229}
12230
12231
12232/* Opcode 0xf3 0x0f 0xc5 - invalid */
12233/* Opcode 0xf2 0x0f 0xc5 - invalid */
12234
12235
12236/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12237FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12238{
12239 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12241 if (IEM_IS_MODRM_REG_MODE(bRm))
12242 {
12243 /*
12244 * XMM, XMM, imm8.
12245 */
12246 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12247 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12249 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12250 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12251 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12252 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12253 IEM_MC_PREPARE_SSE_USAGE();
12254 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12255 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12256 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12257 IEM_MC_ADVANCE_RIP_AND_FINISH();
12258 IEM_MC_END();
12259 }
12260 else
12261 {
12262 /*
12263 * XMM, [mem128], imm8.
12264 */
12265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12266 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12267 IEM_MC_LOCAL(RTUINT128U, uSrc);
12268 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12270
12271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12272 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12273 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12275 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12276 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12277
12278 IEM_MC_PREPARE_SSE_USAGE();
12279 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12280 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12281
12282 IEM_MC_ADVANCE_RIP_AND_FINISH();
12283 IEM_MC_END();
12284 }
12285}
12286
12287
12288/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12289FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12290{
12291 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12293 if (IEM_IS_MODRM_REG_MODE(bRm))
12294 {
12295 /*
12296 * XMM, XMM, imm8.
12297 */
12298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12299 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12301 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12302 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12303 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12304 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12305 IEM_MC_PREPARE_SSE_USAGE();
12306 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12307 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12308 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12309 IEM_MC_ADVANCE_RIP_AND_FINISH();
12310 IEM_MC_END();
12311 }
12312 else
12313 {
12314 /*
12315 * XMM, [mem128], imm8.
12316 */
12317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12318 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12319 IEM_MC_LOCAL(RTUINT128U, uSrc);
12320 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12322
12323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12324 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12325 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12328 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12329
12330 IEM_MC_PREPARE_SSE_USAGE();
12331 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12332 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12333
12334 IEM_MC_ADVANCE_RIP_AND_FINISH();
12335 IEM_MC_END();
12336 }
12337}
12338
12339
12340/* Opcode 0xf3 0x0f 0xc6 - invalid */
12341/* Opcode 0xf2 0x0f 0xc6 - invalid */
12342
12343
12344/**
12345 * @opmaps grp9
12346 * @opcode /1
12347 * @opcodesub !11 mr/reg rex.w=0
12348 * @oppfx n/a
12349 * @opflmodify zf
12350 */
12351FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12352{
12353 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12354#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12355 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12358 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12359 \
12360 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12361 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12362 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12363 \
12364 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12365 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12366 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12367 \
12368 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12369 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12370 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12371 \
12372 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12373 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12374 \
12375 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12376 IEM_MC_COMMIT_EFLAGS(EFlags); \
12377 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12378 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12379 } IEM_MC_ENDIF(); \
12380 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12381 \
12382 IEM_MC_END()
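    /* Instantiate the body with a plain read-write memory mapping unless a LOCK
       prefix requires (and the execution mode doesn't disregard) an atomic one. */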
12383 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12384 {
12385 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12386 }
12387 else
12388 {
12389 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12390 }
12391}
12392
12393
12394/**
12395 * @opmaps grp9
12396 * @opcode /1
12397 * @opcodesub !11 mr/reg rex.w=1
12398 * @oppfx n/a
12399 * @opflmodify zf
12400 */
12401FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12402{
12403 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12404 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12405 {
12406 /*
12407 * This is hairy, very hairy macro fun. We're walking a fine line
12408 * here to make the code parsable by IEMAllInstPython.py and fit into
12409 * the patterns IEMAllThrdPython.py requires for the code morphing.
12410 */
12411#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12412 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12415 IEMOP_HLP_DONE_DECODING(); \
12416 \
12417 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12418 bUnmapInfoStmt; \
12419 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12420 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12421 \
12422 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12423 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12424 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12425 \
12426 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12427 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12428 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12429 \
12430 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12431
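/* Note: BODY_CMPXCHG16B_HEAD deliberately ends without a semicolon so that each
   caller can splice the appropriate IEM_MC_CALL_* invocation in between the
   HEAD and TAIL macros. */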
12432#define BODY_CMPXCHG16B_TAIL(a_Type) \
12433 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12434 IEM_MC_COMMIT_EFLAGS(EFlags); \
12435 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12436 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12437 } IEM_MC_ENDIF(); \
12438 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12439 IEM_MC_END()
12440
12441#ifdef RT_ARCH_AMD64
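    /* On AMD64 hosts, use the native assembly worker when the host CPU itself
       has CMPXCHG16B; otherwise take the fallback paths described in the
       generic #else section below. */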
12442 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12443 {
12444 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12445 {
12446 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12447 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12448 BODY_CMPXCHG16B_TAIL(RW);
12449 }
12450 else
12451 {
12452 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12453 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12454 BODY_CMPXCHG16B_TAIL(ATOMIC);
12455 }
12456 }
12457 else
12458 { /* (see comments in #else case below) */
12459 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12460 {
12461 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12463 BODY_CMPXCHG16B_TAIL(RW);
12464 }
12465 else
12466 {
12467 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12468 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12469 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12470 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12471 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12472 pEFlags, bUnmapInfo);
12473 IEM_MC_END();
12474 }
12475 }
12476
12477#elif defined(RT_ARCH_ARM64)
12478 /** @todo may require fallback for unaligned accesses... */
12479 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12480 {
12481 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12483 BODY_CMPXCHG16B_TAIL(RW);
12484 }
12485 else
12486 {
12487 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12489 BODY_CMPXCHG16B_TAIL(ATOMIC);
12490 }
12491
12492#else
12493     /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
12494        accesses and is not atomic, which works fine in a uni-CPU guest
12495 configuration (ignoring DMA). If guest SMP is active we have no choice
12496 but to use a rendezvous callback here. Sigh. */
12497 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12498 {
12499 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12500 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12501 BODY_CMPXCHG16B_TAIL(RW);
12502 }
12503 else
12504 {
12505 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12506 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12508 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12509 iemCImpl_cmpxchg16b_fallback_rendezvous,
12510 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12511 IEM_MC_END();
12512 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12513 }
12514#endif
12515
12516 #undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12517 }
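    /* Not reached when CMPXCHG16B is supported: all the MC blocks above return
       via IEM_MC_END. */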
12518 Log(("cmpxchg16b -> #UD\n"));
12519 IEMOP_RAISE_INVALID_OPCODE_RET();
12520}
12521
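/** Opcode 0x0f 0xc7 !11/1. Dispatches to cmpxchg16b when REX.W is set,
 *  otherwise to cmpxchg8b. */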
12522FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12523{
12524 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12525 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12526 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12527}
12528
12529
12530/** Opcode 0x0f 0xc7 11/6. */
12531FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12532{
12533 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12534 IEMOP_RAISE_INVALID_OPCODE_RET();
12535
12536 if (IEM_IS_MODRM_REG_MODE(bRm))
12537 {
12538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12540 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12541 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12542 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12543 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12544 iemCImpl_rdrand, iReg, enmEffOpSize);
12545 IEM_MC_END();
12546 }
12547 /* Register only. */
12548 else
12549 IEMOP_RAISE_INVALID_OPCODE_RET();
12550}
12551
12552/** Opcode 0x0f 0xc7 !11/6. */
12553#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12554FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12555{
12556 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12557 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12558 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12559 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12560 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12562 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12563 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12564 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12565 IEM_MC_END();
12566}
12567#else
12568FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12569#endif
12570
12571/** Opcode 0x66 0x0f 0xc7 !11/6. */
12572#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12573FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12574{
12575 IEMOP_MNEMONIC(vmclear, "vmclear");
12576 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12577 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12578 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12579 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12581 IEMOP_HLP_DONE_DECODING();
12582 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12583 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12584 IEM_MC_END();
12585}
12586#else
12587FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12588#endif
12589
12590/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12591#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12592FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12593{
12594 IEMOP_MNEMONIC(vmxon, "vmxon");
12595 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12596 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12597 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12599 IEMOP_HLP_DONE_DECODING();
12600 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12601 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12602 IEM_MC_END();
12603}
12604#else
12605FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12606#endif
12607
12608/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12609#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12610FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12611{
12612 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12613 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12614 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12615 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12616 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12618 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12619 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12620 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12621 IEM_MC_END();
12622}
12623#else
12624FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12625#endif
12626
12627/** Opcode 0x0f 0xc7 11/7. */
12628FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12629{
12630 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12631 IEMOP_RAISE_INVALID_OPCODE_RET();
12632
12633 if (IEM_IS_MODRM_REG_MODE(bRm))
12634 {
12635 /* register destination. */
12636 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12638 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12639 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12640 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12641 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12642 iemCImpl_rdseed, iReg, enmEffOpSize);
12643 IEM_MC_END();
12644 }
12645 /* Register only. */
12646 else
12647 IEMOP_RAISE_INVALID_OPCODE_RET();
12648}
12649
12650/**
12651 * Group 9 jump table for register variant.
12652 */
12653IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12654{ /* pfx: none, 066h, 0f3h, 0f2h */
12655 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12656 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12657 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12658 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12659 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12660 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12661 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12662 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12663};
12664AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12665
12666
12667/**
12668 * Group 9 jump table for memory variant.
12669 */
12670IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12671{ /* pfx: none, 066h, 0f3h, 0f2h */
12672 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12673 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12674 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12675 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12676 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12677 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12678 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12679 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12680};
12681AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12682
12683
12684/** Opcode 0x0f 0xc7. */
12685FNIEMOP_DEF(iemOp_Grp9)
12686{
12687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
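    /* Both tables have four columns per /r value, one for each prefix
       (none, 066h, 0f3h, 0f2h), selected via idxPrefix. */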
12688 if (IEM_IS_MODRM_REG_MODE(bRm))
12689 /* register, register */
12690 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12691 + pVCpu->iem.s.idxPrefix], bRm);
12692 /* memory, register */
12693 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12694 + pVCpu->iem.s.idxPrefix], bRm);
12695}
12696
12697
12698/**
12699 * Common 'bswap register' helper.
12700 */
12701FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12702{
12703 switch (pVCpu->iem.s.enmEffOpSize)
12704 {
12705 case IEMMODE_16BIT:
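            /* Note: the Intel manuals document BSWAP with a 16-bit operand as
               undefined; iemAImpl_bswap_u16 supplies the behavior IEM settles on. */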
12706 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12708 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12709 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12710 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12711 IEM_MC_ADVANCE_RIP_AND_FINISH();
12712 IEM_MC_END();
12713 break;
12714
12715 case IEMMODE_32BIT:
12716 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12718 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12719 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12720 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12721 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12722 IEM_MC_ADVANCE_RIP_AND_FINISH();
12723 IEM_MC_END();
12724 break;
12725
12726 case IEMMODE_64BIT:
12727 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12729 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12730 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12731 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12732 IEM_MC_ADVANCE_RIP_AND_FINISH();
12733 IEM_MC_END();
12734 break;
12735
12736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12737 }
12738}
12739
12740
12741/** Opcode 0x0f 0xc8. */
12742FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12743{
12744 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12745     /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12746        prefix, but REX.B appears to be the correct one. For a parallel
12747 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12748 IEMOP_HLP_MIN_486();
12749 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12750}
12751
12752
12753/** Opcode 0x0f 0xc9. */
12754FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12755{
12756 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12757 IEMOP_HLP_MIN_486();
12758 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12759}
12760
12761
12762/** Opcode 0x0f 0xca. */
12763FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12764{
12765     IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12766 IEMOP_HLP_MIN_486();
12767 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12768}
12769
12770
12771/** Opcode 0x0f 0xcb. */
12772FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12773{
12774     IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12775 IEMOP_HLP_MIN_486();
12776 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12777}
12778
12779
12780/** Opcode 0x0f 0xcc. */
12781FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12782{
12783 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12784 IEMOP_HLP_MIN_486();
12785 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12786}
12787
12788
12789/** Opcode 0x0f 0xcd. */
12790FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12791{
12792 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12793 IEMOP_HLP_MIN_486();
12794 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12795}
12796
12797
12798/** Opcode 0x0f 0xce. */
12799FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12800{
12801 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12802 IEMOP_HLP_MIN_486();
12803 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12804}
12805
12806
12807/** Opcode 0x0f 0xcf. */
12808FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12809{
12810 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12811 IEMOP_HLP_MIN_486();
12812 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12813}
12814
12815
12816/* Opcode 0x0f 0xd0 - invalid */
12817
12818
12819/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12820FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12821{
12822 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12823 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12824}
12825
12826
12827/* Opcode 0xf3 0x0f 0xd0 - invalid */
12828
12829
12830/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12831FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12832{
12833 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12834 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12835}
12836
12837
12838
12839/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12840FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12841{
12842 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12843 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12844}
12845
12846/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12847FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12848{
12849 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12850 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12851}
12852
12853/* Opcode 0xf3 0x0f 0xd1 - invalid */
12854/* Opcode 0xf2 0x0f 0xd1 - invalid */
12855
12856/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12857FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12858{
12859 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12860 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12861}
12862
12863
12864/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12865FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12866{
12867 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12868 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12869}
12870
12871
12872/* Opcode 0xf3 0x0f 0xd2 - invalid */
12873/* Opcode 0xf2 0x0f 0xd2 - invalid */
12874
12875/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12876FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12877{
12878     IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12879 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12880}
12881
12882
12883/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12884FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12885{
12886 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12887 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12888}
12889
12890
12891/* Opcode 0xf3 0x0f 0xd3 - invalid */
12892/* Opcode 0xf2 0x0f 0xd3 - invalid */
12893
12894
12895/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12896FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12897{
12898 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12899 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12900}
12901
12902
12903/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12904FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12905{
12906 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12907 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12908}
12909
12910
12911/* Opcode 0xf3 0x0f 0xd4 - invalid */
12912/* Opcode 0xf2 0x0f 0xd4 - invalid */
12913
12914/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12915FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12916{
12917 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12918 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12919}
12920
12921/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12922FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12923{
12924 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12925 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12926}
12927
12928
12929/* Opcode 0xf3 0x0f 0xd5 - invalid */
12930/* Opcode 0xf2 0x0f 0xd5 - invalid */
12931
12932/* Opcode 0x0f 0xd6 - invalid */
12933
12934/**
12935 * @opcode 0xd6
12936 * @oppfx 0x66
12937 * @opcpuid sse2
12938 * @opgroup og_sse2_pcksclr_datamove
12939 * @opxcpttype none
12940 * @optest op1=-1 op2=2 -> op1=2
12941 * @optest op1=0 op2=-42 -> op1=-42
12942 */
12943FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12944{
12945 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12947 if (IEM_IS_MODRM_REG_MODE(bRm))
12948 {
12949 /*
12950 * Register, register.
12951 */
12952 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12954 IEM_MC_LOCAL(uint64_t, uSrc);
12955
12956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12957 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12958
12959 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12960 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12961
12962 IEM_MC_ADVANCE_RIP_AND_FINISH();
12963 IEM_MC_END();
12964 }
12965 else
12966 {
12967 /*
12968 * Memory, register.
12969 */
12970 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12971 IEM_MC_LOCAL(uint64_t, uSrc);
12972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12973
12974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12978
12979 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12980 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12981
12982 IEM_MC_ADVANCE_RIP_AND_FINISH();
12983 IEM_MC_END();
12984 }
12985}
12986
12987
12988/**
12989 * @opcode 0xd6
12990 * @opcodesub 11 mr/reg
12991 * @oppfx f3
12992 * @opcpuid sse2
12993 * @opgroup og_sse2_simdint_datamove
12994 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12995 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12996 */
12997FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12998{
12999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13000 if (IEM_IS_MODRM_REG_MODE(bRm))
13001 {
13002 /*
13003 * Register, register.
13004 */
13005 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13006 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13008 IEM_MC_LOCAL(uint64_t, uSrc);
13009
13010 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13011 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13012 IEM_MC_FPU_TO_MMX_MODE();
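        /* Entering MMX mode marks all x87 register tags as valid, which is what
           the ftw=0xff expectations in the @optest lines above verify. */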
13013
13014 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13015 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13016
13017 IEM_MC_ADVANCE_RIP_AND_FINISH();
13018 IEM_MC_END();
13019 }
13020
13021 /**
13022 * @opdone
13023 * @opmnemonic udf30fd6mem
13024 * @opcode 0xd6
13025 * @opcodesub !11 mr/reg
13026 * @oppfx f3
13027 * @opunused intel-modrm
13028 * @opcpuid sse
13029 * @optest ->
13030 */
13031 else
13032 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13033}
13034
13035
13036/**
13037 * @opcode 0xd6
13038 * @opcodesub 11 mr/reg
13039 * @oppfx f2
13040 * @opcpuid sse2
13041 * @opgroup og_sse2_simdint_datamove
13042 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13043 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13044 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13045 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13046 * @optest op1=-42 op2=0xfedcba9876543210
13047 * -> op1=0xfedcba9876543210 ftw=0xff
13048 */
13049FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13050{
13051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13052 if (IEM_IS_MODRM_REG_MODE(bRm))
13053 {
13054 /*
13055 * Register, register.
13056 */
13057 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13060 IEM_MC_LOCAL(uint64_t, uSrc);
13061
13062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13063 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13064 IEM_MC_FPU_TO_MMX_MODE();
13065
13066 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13067 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13068
13069 IEM_MC_ADVANCE_RIP_AND_FINISH();
13070 IEM_MC_END();
13071 }
13072
13073 /**
13074 * @opdone
13075 * @opmnemonic udf20fd6mem
13076 * @opcode 0xd6
13077 * @opcodesub !11 mr/reg
13078 * @oppfx f2
13079 * @opunused intel-modrm
13080 * @opcpuid sse
13081 * @optest ->
13082 */
13083 else
13084 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13085}
13086
13087
13088/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13089FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13090{
13091 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13092     /* Docs say register only. */
13093 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13094 {
13095         /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13096 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13097 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13099 IEM_MC_ARG(uint64_t *, puDst, 0);
13100 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13101 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13102 IEM_MC_PREPARE_FPU_USAGE();
13103 IEM_MC_FPU_TO_MMX_MODE();
13104
13105 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13106 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13107 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13108
13109 IEM_MC_ADVANCE_RIP_AND_FINISH();
13110 IEM_MC_END();
13111 }
13112 else
13113 IEMOP_RAISE_INVALID_OPCODE_RET();
13114}
13115
13116
13117 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13118FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13119{
13120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13121     /* Docs say register only. */
13122 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13123 {
13124         /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13125 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13126 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13128 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13129 IEM_MC_PREPARE_SSE_USAGE();
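        /* Use the native recompiler emitter on hosts that provide one; otherwise
           fall back to the generic C worker. */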
13130 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13131 IEM_MC_LIVENESS_GREG_CLOBBER(IEM_GET_MODRM_REG(pVCpu, bRm));
13132 IEM_MC_LIVENESS_XREG_INPUT(IEM_GET_MODRM_RM(pVCpu, bRm));
13133 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13134 } IEM_MC_NATIVE_ELSE() {
13135 IEM_MC_ARG(uint64_t *, puDst, 0);
13136 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13137 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13138 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13139 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13140 } IEM_MC_NATIVE_ENDIF();
13141 IEM_MC_ADVANCE_RIP_AND_FINISH();
13142 IEM_MC_END();
13143 }
13144 else
13145 IEMOP_RAISE_INVALID_OPCODE_RET();
13146}
13147
13148
13149/* Opcode 0xf3 0x0f 0xd7 - invalid */
13150/* Opcode 0xf2 0x0f 0xd7 - invalid */
13151
13152
13153/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13154FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13155{
13156 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13157 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13158}
13159
13160
13161/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13162FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13163{
13164 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13165 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13166}
13167
13168
13169/* Opcode 0xf3 0x0f 0xd8 - invalid */
13170/* Opcode 0xf2 0x0f 0xd8 - invalid */
13171
13172/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13173FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13174{
13175 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13176 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13177}
13178
13179
13180/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13181FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13182{
13183 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13184 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13185}
13186
13187
13188/* Opcode 0xf3 0x0f 0xd9 - invalid */
13189/* Opcode 0xf2 0x0f 0xd9 - invalid */
13190
13191/** Opcode 0x0f 0xda - pminub Pq, Qq */
13192FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13193{
13194 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13195 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13196}
13197
13198
13199/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13200FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13201{
13202 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13203 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13204}
13205
13206/* Opcode 0xf3 0x0f 0xda - invalid */
13207/* Opcode 0xf2 0x0f 0xda - invalid */
13208
13209/** Opcode 0x0f 0xdb - pand Pq, Qq */
13210FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13211{
13212 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13213 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13214}
13215
13216
13217/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13218FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13219{
13220 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13221 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13222}
13223
13224
13225/* Opcode 0xf3 0x0f 0xdb - invalid */
13226/* Opcode 0xf2 0x0f 0xdb - invalid */
13227
13228/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13229FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13230{
13231 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13232 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13233}
13234
13235
13236/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13237FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13238{
13239 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13240 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13241}
13242
13243
13244/* Opcode 0xf3 0x0f 0xdc - invalid */
13245/* Opcode 0xf2 0x0f 0xdc - invalid */
13246
13247/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13248FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13249{
13250 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13251 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13252}
13253
13254
13255/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13256FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13257{
13258 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13259 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13260}
13261
13262
13263/* Opcode 0xf3 0x0f 0xdd - invalid */
13264/* Opcode 0xf2 0x0f 0xdd - invalid */
13265
13266/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13267FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13268{
13269 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13270 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13271}
13272
13273
13274 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13275FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13276{
13277 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13278 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13279}
13280
13281/* Opcode 0xf3 0x0f 0xde - invalid */
13282/* Opcode 0xf2 0x0f 0xde - invalid */
13283
13284
13285/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13286FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13287{
13288 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13289 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13290}
13291
13292
13293/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13294FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13295{
13296 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13297 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13298}
13299
13300
13301/* Opcode 0xf3 0x0f 0xdf - invalid */
13302/* Opcode 0xf2 0x0f 0xdf - invalid */
13303
13304/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13305FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13306{
13307 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13308 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13309}
13310
13311
13312/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13313FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13314{
13315 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13316 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13317}
13318
13319
13320/* Opcode 0xf3 0x0f 0xe0 - invalid */
13321/* Opcode 0xf2 0x0f 0xe0 - invalid */
13322
13323/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13324FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13325{
13326 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13327 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13328}
13329
13330
13331/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13332FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13333{
13334 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13335 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13336}
13337
13338
13339/* Opcode 0xf3 0x0f 0xe1 - invalid */
13340/* Opcode 0xf2 0x0f 0xe1 - invalid */
13341
13342/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13343FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13344{
13345 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13346 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13347}
13348
13349
13350/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13351FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13352{
13353 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13354 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13355}
13356
13357
13358/* Opcode 0xf3 0x0f 0xe2 - invalid */
13359/* Opcode 0xf2 0x0f 0xe2 - invalid */
13360
13361/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13362FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13363{
13364 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13365 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13366}
13367
13368
13369/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13370FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13371{
13372 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13373 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13374}
13375
13376
13377/* Opcode 0xf3 0x0f 0xe3 - invalid */
13378/* Opcode 0xf2 0x0f 0xe3 - invalid */
13379
13380/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13381FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13382{
13383 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13384 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13385}
13386
13387
13388/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13389FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13390{
13391 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13392 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13393}
13394
13395
13396/* Opcode 0xf3 0x0f 0xe4 - invalid */
13397/* Opcode 0xf2 0x0f 0xe4 - invalid */
13398
13399/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13400FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13401{
13402 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13403 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13404}
13405
13406
13407/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13408FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13409{
13410 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13411 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13412}
13413
13414
13415/* Opcode 0xf3 0x0f 0xe5 - invalid */
13416/* Opcode 0xf2 0x0f 0xe5 - invalid */
13417/* Opcode 0x0f 0xe6 - invalid */
13418
13419
13420/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13421FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13422{
13423 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13424 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13425}
13426
13427
13428/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13429FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13430{
13431 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13432 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13433}
13434
13435
13436/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13437FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13438{
13439 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13440 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13441}
13442
13443
13444/**
13445 * @opcode 0xe7
13446 * @opcodesub !11 mr/reg
13447 * @oppfx none
13448 * @opcpuid sse
13449 * @opgroup og_sse1_cachect
13450 * @opxcpttype none
13451 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13452 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13453 */
13454FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13455{
13456 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13458 if (IEM_IS_MODRM_MEM_MODE(bRm))
13459 {
13460 /* Register, memory. */
13461 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13462 IEM_MC_LOCAL(uint64_t, uSrc);
13463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13464
13465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13466         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13467 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13468 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13469 IEM_MC_FPU_TO_MMX_MODE();
13470
13471 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13472 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13473
13474 IEM_MC_ADVANCE_RIP_AND_FINISH();
13475 IEM_MC_END();
13476 }
13477 /**
13478 * @opdone
13479 * @opmnemonic ud0fe7reg
13480 * @opcode 0xe7
13481 * @opcodesub 11 mr/reg
13482 * @oppfx none
13483 * @opunused immediate
13484 * @opcpuid sse
13485 * @optest ->
13486 */
13487 else
13488 IEMOP_RAISE_INVALID_OPCODE_RET();
13489}
13490
13491/**
13492 * @opcode 0xe7
13493 * @opcodesub !11 mr/reg
13494 * @oppfx 0x66
13495 * @opcpuid sse2
13496 * @opgroup og_sse2_cachect
13497 * @opxcpttype 1
13498 * @optest op1=-1 op2=2 -> op1=2
13499 * @optest op1=0 op2=-42 -> op1=-42
13500 */
13501FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13502{
13503 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13505 if (IEM_IS_MODRM_MEM_MODE(bRm))
13506 {
13507 /* Register, memory. */
13508 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13509 IEM_MC_LOCAL(RTUINT128U, uSrc);
13510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13511
13512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13516
13517 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13518 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13519
13520 IEM_MC_ADVANCE_RIP_AND_FINISH();
13521 IEM_MC_END();
13522 }
13523
13524 /**
13525 * @opdone
13526 * @opmnemonic ud660fe7reg
13527 * @opcode 0xe7
13528 * @opcodesub 11 mr/reg
13529 * @oppfx 0x66
13530 * @opunused immediate
13531 * @opcpuid sse
13532 * @optest ->
13533 */
13534 else
13535 IEMOP_RAISE_INVALID_OPCODE_RET();
13536}

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
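
/* SSE2_OPT_BODY_FullFull_To_Full presumably expands to the same decoding
 * skeleton as iemOpCommonSse2Opt_FullFull_To_Full; the extra
 * RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64 arguments would then indicate on
 * which host architectures specialized (native) helpers exist for this
 * operation. */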


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
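
/* lddqu never requires alignment: the fetch above goes through
 * IEM_MC_FETCH_MEM_U128_NO_AC, i.e. no alignment check and no \#GP(0) for
 * misaligned operands.  (Real hardware may satisfy the load by reading a
 * wider aligned chunk; that detail is not modelled here.) */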


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
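
/* Illustrative reference semantics for pmuludq (not the actual
 * iemAImpl_pmuludq_u128 body, which lives with the other instruction
 * helpers): each 64-bit lane receives the full product of the low dwords:
 *
 *      puDst->au64[0] = (uint64_t)puDst->au32[0] * puSrc->au32[0];
 *      puDst->au64[1] = (uint64_t)puDst->au32[2] * puSrc->au32[2];
 */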


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
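
/* Illustrative reference semantics for psadbw (not the actual
 * iemAImpl_psadbw_u128 body): each 8-byte group produces the sum of absolute
 * byte differences in its low word, the remaining lane bytes being zeroed:
 *
 *      uint16_t uSum = 0;
 *      for (unsigned i = 0; i < 8; i++)
 *          uSum += (uint16_t)RT_ABS((int)puDst->au8[i] - (int)puSrc->au8[i]);
 *      puDst->au64[0] = uSum;  // and likewise au8[8..15] -> au64[1]
 */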


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL( uint64_t, u64EffAddr);
        IEM_MC_LOCAL( uint64_t, u64Mem);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
        IEM_MC_ARG( uint64_t const *, puSrc, 1);
        IEM_MC_ARG( uint64_t const *, puMsk, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
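
/* Note: the masked store is emulated as a read-modify-write of the whole
 * qword (IEM_MC_FETCH_MEM_U64 ... IEM_MC_STORE_MEM_U64 above), so bytes with
 * a clear mask MSB are rewritten with their old value rather than skipped as
 * on real hardware.  Illustrative byte-selection semantics (not the actual
 * iemAImpl_maskmovq_u64 body):
 *
 *      for (unsigned iByte = 0; iByte < 8; iByte++)
 *          if (*puMsk & (UINT64_C(0x80) << (iByte * 8)))
 *              *pu64Mem = (*pu64Mem & ~(UINT64_C(0xff) << (iByte * 8)))
 *                       | (*puSrc  &  (UINT64_C(0xff) << (iByte * 8)));
 *
 * The maskmovdqu worker below uses the same read-modify-write pattern with
 * 128-bit operands. */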


/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL( uint64_t, u64EffAddr);
        IEM_MC_LOCAL( RTUINT128U, u128Mem);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
        IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
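
/* Example: with the Intel behaviour modelled above, '0f ff /r' consumes the
 * ModR/M byte plus any SIB/displacement bytes before raising \#UD, so e.g.
 * 0f ff 84 24 00 00 00 00 (ud0 eax, [esp+0]) decodes as a single 8-byte
 * instruction, while on non-Intel CPUs the \#UD is raised after just the two
 * opcode bytes. */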



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
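
/* Each opcode has four consecutive entries, one per active SIMD prefix in
 * the order given in the table (none, 0x66, 0xf3, 0xf2), hence the
 * 256 * 4 = 1024 entries asserted above.  A hypothetical lookup would thus
 * resemble:
 *
 *      PFNIEMOP pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxSimdPrefix];
 *
 * where idxSimdPrefix is an assumed 0..3 prefix index, not necessarily the
 * variable the decoder actually uses. */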

/** @} */
