VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@105904

Last change on this file since 105904 was 105699, checked in by vboxsync, 4 months ago

VMM/IEM: Fix disassembly metadata for movq_Pq_Qq, vcmpss, vcmpsd instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 518.0 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 105699 2024-08-16 06:47:00Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
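

/* Hypothetical usage sketch: opcode handlers later in this file typically
   wire an MMX opcode to the common worker above like so. The mnemonic and
   the iemAImpl_* helper name below are placeholders, not real symbols. */
#if 0
FNIEMOP_DEF(iemOp_pxxx_Pq_Qq_example)
{
    IEMOP_MNEMONIC2(RM, PXXX, pxxx, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxxx_u64);
}
#endif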


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
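

/* Hypothetical usage sketch: a handler for a pand/pxor-style instruction
   would expand the body macro above directly, passing the instruction name
   (consumed by RT_CONCAT3 to pick the native emitter) and the host
   architecture masks. All names and masks below are placeholders. */
#if 0
FNIEMOP_DEF(iemOp_pxxx_Vx_Wx_example)
{
    IEMOP_MNEMONIC2(RM, PXXX, pxxx, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pxxx, iemAImpl_pxxx_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
#endif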


/**
 * Common worker for MMX instructions of the form:
 *    pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read the full 128 bits or only the
 * low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read the full 128 bits or only the
 * low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
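

/* Hypothetical usage sketch: a full-width SSE floating-point opcode handler
   (addps-style) would defer to the worker above together with its assembly
   helper; both names below are placeholders, not real symbols. */
#if 0
FNIEMOP_DEF(iemOp_xxxps_Vps_Wps_example)
{
    IEMOP_MNEMONIC2(RM, XXXPS, xxxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_xxxps_u128);
}
#endif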


/**
 * A body preprocessor variant of iemOpCommonSseFp_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE_FP_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLFPSSEF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCX86XMMREG, pSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(X86XMMREG, uSrc2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc2); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
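

/* Hypothetical usage sketch: as with SSE2_OPT_BODY_FullFull_To_Full above,
   an opcode handler expands this body macro in place when a native emitter
   exists for the instruction; names and architecture masks are placeholders. */
#if 0
FNIEMOP_DEF(iemOp_xxxps_Vps_Wps_native_example)
{
    IEMOP_MNEMONIC2(RM, XXXPS, xxxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(xxxps, iemAImpl_xxxps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
#endif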


/**
 * Common worker for SSE instructions of the form:
 *    pxxs xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
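

/* Hypothetical usage sketch: scalar single-precision instructions
   (addss-style, F3-prefixed) would use the R32 worker above; the names
   below are placeholders, not real symbols. */
#if 0
FNIEMOP_DEF(iemOp_xxxss_Vss_Wss_example)
{
    IEMOP_MNEMONIC2(RM, XXXSS, xxxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_xxxss_u128_r32);
}
#endif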


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxs xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *    hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1667
1668
1669/** Opcode 0x0f 0x01 /7. */
1670FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1671{
1672 IEMOP_MNEMONIC(invlpg, "invlpg");
1673 IEMOP_HLP_MIN_486();
1674 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1675 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1678 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1679 IEM_MC_END();
1680}
1681
1682
1683/** Opcode 0x0f 0x01 0xf8. */
1684FNIEMOP_DEF(iemOp_Grp7_swapgs)
1685{
1686 IEMOP_MNEMONIC(swapgs, "swapgs");
1687 IEMOP_HLP_ONLY_64BIT();
1688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
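 /* swapgs exchanges the GS base with the IA32_KERNEL_GS_BASE MSR, which is
    why only the GS base register is listed as modified below. */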
1689 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1690}
1691
1692
1693/** Opcode 0x0f 0x01 0xf9. */
1694FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1695{
1696 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
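 /* rdtscp returns the TSC in EDX:EAX and IA32_TSC_AUX in ECX, hence the
    three GPRs listed as modified below. */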
1698 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1699 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1700 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1701 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1702 iemCImpl_rdtscp);
1703}
1704
1705
1706/**
1707 * Group 7 jump table, memory variant.
1708 */
1709IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1710{
1711 iemOp_Grp7_sgdt,
1712 iemOp_Grp7_sidt,
1713 iemOp_Grp7_lgdt,
1714 iemOp_Grp7_lidt,
1715 iemOp_Grp7_smsw,
1716 iemOp_InvalidWithRM,
1717 iemOp_Grp7_lmsw,
1718 iemOp_Grp7_invlpg
1719};
1720
1721
1722/** Opcode 0x0f 0x01. */
1723FNIEMOP_DEF(iemOp_Grp7)
1724{
1725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
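 /* Memory forms dispatch on the reg field via g_apfnGroup7Mem above; the
    register forms encode individual instructions in the reg+rm fields. */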
1726 if (IEM_IS_MODRM_MEM_MODE(bRm))
1727 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1728
1729 switch (IEM_GET_MODRM_REG_8(bRm))
1730 {
1731 case 0:
1732 switch (IEM_GET_MODRM_RM_8(bRm))
1733 {
1734 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1735 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1736 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1737 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1738 }
1739 IEMOP_RAISE_INVALID_OPCODE_RET();
1740
1741 case 1:
1742 switch (IEM_GET_MODRM_RM_8(bRm))
1743 {
1744 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1745 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1746 }
1747 IEMOP_RAISE_INVALID_OPCODE_RET();
1748
1749 case 2:
1750 switch (IEM_GET_MODRM_RM_8(bRm))
1751 {
1752 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1753 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1754 }
1755 IEMOP_RAISE_INVALID_OPCODE_RET();
1756
1757 case 3:
1758 switch (IEM_GET_MODRM_RM_8(bRm))
1759 {
1760 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1761 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1762 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1763 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1764 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1765 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1766 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1767 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1769 }
1770
1771 case 4:
1772 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1773
1774 case 5:
1775 IEMOP_RAISE_INVALID_OPCODE_RET();
1776
1777 case 6:
1778 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1779
1780 case 7:
1781 switch (IEM_GET_MODRM_RM_8(bRm))
1782 {
1783 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1784 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1785 }
1786 IEMOP_RAISE_INVALID_OPCODE_RET();
1787
1788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1789 }
1790}
1791
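/**
 * Common worker for LAR and LSL (Gv, Ew); @a fIsLar selects between them.
 * Both check the given selector and set ZF on success; LAR then stores the
 * access rights and LSL the segment limit in the destination register.
 */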
1792FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1793{
1794 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1796
1797 if (IEM_IS_MODRM_REG_MODE(bRm))
1798 {
1799 switch (pVCpu->iem.s.enmEffOpSize)
1800 {
1801 case IEMMODE_16BIT:
1802 IEM_MC_BEGIN(0, 0);
1803 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1804 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1805 IEM_MC_ARG(uint16_t, u16Sel, 1);
1806 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1807
1808 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1809 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1810 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1811 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1812
1813 IEM_MC_END();
1814 break;
1815
1816 case IEMMODE_32BIT:
1817 case IEMMODE_64BIT:
1818 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1819 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1820 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1821 IEM_MC_ARG(uint16_t, u16Sel, 1);
1822 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1823
1824 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1825 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1826 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1827 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1828
1829 IEM_MC_END();
1830 break;
1831
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834 }
1835 else
1836 {
1837 switch (pVCpu->iem.s.enmEffOpSize)
1838 {
1839 case IEMMODE_16BIT:
1840 IEM_MC_BEGIN(0, 0);
1841 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1842 IEM_MC_ARG(uint16_t, u16Sel, 1);
1843 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1845
1846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1847 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1848
1849 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1850 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1851 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1852 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1853
1854 IEM_MC_END();
1855 break;
1856
1857 case IEMMODE_32BIT:
1858 case IEMMODE_64BIT:
1859 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1860 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1861 IEM_MC_ARG(uint16_t, u16Sel, 1);
1862 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1864
1865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1866 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1867/** @todo testcase: make sure it's a 16-bit read. */
1868
1869 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1870 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1871 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1872 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1873
1874 IEM_MC_END();
1875 break;
1876
1877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1878 }
1879 }
1880}
1881
1882
1883
1884/**
1885 * @opcode 0x02
1886 * @opflmodify zf
1887 */
1888FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1889{
1890 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1891 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1892}
1893
1894
1895/**
1896 * @opcode 0x03
1897 * @opflmodify zf
1898 */
1899FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1900{
1901 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1902 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1903}
1904
1905
1906/** Opcode 0x0f 0x05. */
1907FNIEMOP_DEF(iemOp_syscall)
1908{
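 /* On the 286 this opcode (0x0f 0x05) is LOADALL rather than SYSCALL. */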
1909 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1910 {
1911 IEMOP_MNEMONIC(syscall, "syscall");
1912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1913 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1914 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1915 }
1916 else
1917 {
1918 IEMOP_MNEMONIC(loadall286, "loadall286");
1919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1920 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1921 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1922 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1923 }
1924}
1925
1926
1927/** Opcode 0x0f 0x06. */
1928FNIEMOP_DEF(iemOp_clts)
1929{
1930 IEMOP_MNEMONIC(clts, "clts");
1931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1932 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1933}
1934
1935
1936/** Opcode 0x0f 0x07. */
1937FNIEMOP_DEF(iemOp_sysret)
1938{
1939 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1941 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1942 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1943 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1944}
1945
1946
1947/** Opcode 0x0f 0x08. */
1948FNIEMOP_DEF(iemOp_invd)
1949{
1950 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1951 IEMOP_HLP_MIN_486();
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1954}
1955
1956
1957/** Opcode 0x0f 0x09. */
1958FNIEMOP_DEF(iemOp_wbinvd)
1959{
1960 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1961 IEMOP_HLP_MIN_486();
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1963 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1964}
1965
1966
1967/** Opcode 0x0f 0x0b. */
1968FNIEMOP_DEF(iemOp_ud2)
1969{
1970 IEMOP_MNEMONIC(ud2, "ud2");
1971 IEMOP_RAISE_INVALID_OPCODE_RET();
1972}
1973
1974/** Opcode 0x0f 0x0d. */
1975FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1976{
1977 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1978 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1979 {
1980 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1981 IEMOP_RAISE_INVALID_OPCODE_RET();
1982 }
1983
1984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1985 if (IEM_IS_MODRM_REG_MODE(bRm))
1986 {
1987 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1988 IEMOP_RAISE_INVALID_OPCODE_RET();
1989 }
1990
1991 switch (IEM_GET_MODRM_REG_8(bRm))
1992 {
1993 case 2: /* Aliased to /0 for the time being. */
1994 case 4: /* Aliased to /0 for the time being. */
1995 case 5: /* Aliased to /0 for the time being. */
1996 case 6: /* Aliased to /0 for the time being. */
1997 case 7: /* Aliased to /0 for the time being. */
1998 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1999 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2000 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2002 }
2003
2004 IEM_MC_BEGIN(0, 0);
2005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2008 /* Currently a NOP. */
2009 IEM_MC_NOREF(GCPtrEffSrc);
2010 IEM_MC_ADVANCE_RIP_AND_FINISH();
2011 IEM_MC_END();
2012}
2013
2014
2015/** Opcode 0x0f 0x0e. */
2016FNIEMOP_DEF(iemOp_femms)
2017{
2018 IEMOP_MNEMONIC(femms, "femms");
2019
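 /* femms is AMD's faster variant of emms: it likewise marks all x87/MMX
    registers as empty and leaves MMX mode. */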
2020 IEM_MC_BEGIN(0, 0);
2021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2022 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2023 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2024 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2025 IEM_MC_FPU_FROM_MMX_MODE();
2026 IEM_MC_ADVANCE_RIP_AND_FINISH();
2027 IEM_MC_END();
2028}
2029
2030
2031/** Opcode 0x0f 0x0f. */
2032FNIEMOP_DEF(iemOp_3Dnow)
2033{
2034 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2035 {
2036 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2037 IEMOP_RAISE_INVALID_OPCODE_RET();
2038 }
2039
2040#ifdef IEM_WITH_3DNOW
2041 /* This is pretty sparse, use switch instead of table. */
2042 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2043 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2044#else
2045 IEMOP_BITCH_ABOUT_STUB();
2046 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2047#endif
2048}
2049
2050
2051/**
2052 * @opcode 0x10
2053 * @oppfx none
2054 * @opcpuid sse
2055 * @opgroup og_sse_simdfp_datamove
2056 * @opxcpttype 4UA
2057 * @optest op1=1 op2=2 -> op1=2
2058 * @optest op1=0 op2=-22 -> op1=-22
2059 */
2060FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2061{
2062 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2064 if (IEM_IS_MODRM_REG_MODE(bRm))
2065 {
2066 /*
2067 * XMM128, XMM128.
2068 */
2069 IEM_MC_BEGIN(0, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2071 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2074 IEM_GET_MODRM_RM(pVCpu, bRm));
2075 IEM_MC_ADVANCE_RIP_AND_FINISH();
2076 IEM_MC_END();
2077 }
2078 else
2079 {
2080 /*
2081 * XMM128, [mem128].
2082 */
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2086
2087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2089 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2091
2092 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2093 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2094
2095 IEM_MC_ADVANCE_RIP_AND_FINISH();
2096 IEM_MC_END();
2097 }
2099}
2100
2101
2102/**
2103 * @opcode 0x10
2104 * @oppfx 0x66
2105 * @opcpuid sse2
2106 * @opgroup og_sse2_pcksclr_datamove
2107 * @opxcpttype 4UA
2108 * @optest op1=1 op2=2 -> op1=2
2109 * @optest op1=0 op2=-42 -> op1=-42
2110 */
2111FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2112{
2113 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2115 if (IEM_IS_MODRM_REG_MODE(bRm))
2116 {
2117 /*
2118 * XMM128, XMM128.
2119 */
2120 IEM_MC_BEGIN(0, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2124 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2125 IEM_GET_MODRM_RM(pVCpu, bRm));
2126 IEM_MC_ADVANCE_RIP_AND_FINISH();
2127 IEM_MC_END();
2128 }
2129 else
2130 {
2131 /*
2132 * XMM128, [mem128].
2133 */
2134 IEM_MC_BEGIN(0, 0);
2135 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2137
2138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2140 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2141 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2142
2143 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2144 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2145
2146 IEM_MC_ADVANCE_RIP_AND_FINISH();
2147 IEM_MC_END();
2148 }
2149}
2150
2151
2152/**
2153 * @opcode 0x10
2154 * @oppfx 0xf3
2155 * @opcpuid sse
2156 * @opgroup og_sse_simdfp_datamove
2157 * @opxcpttype 5
2158 * @optest op1=1 op2=2 -> op1=2
2159 * @optest op1=0 op2=-22 -> op1=-22
2160 */
2161FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2162{
2163 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2165 if (IEM_IS_MODRM_REG_MODE(bRm))
2166 {
2167 /*
2168 * XMM32, XMM32.
2169 */
2170 IEM_MC_BEGIN(0, 0);
2171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2172 IEM_MC_LOCAL(uint32_t, uSrc);
2173
2174 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2175 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2176 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2177 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2178
2179 IEM_MC_ADVANCE_RIP_AND_FINISH();
2180 IEM_MC_END();
2181 }
2182 else
2183 {
2184 /*
2185 * XMM128, [mem32].
2186 */
2187 IEM_MC_BEGIN(0, 0);
2188 IEM_MC_LOCAL(uint32_t, uSrc);
2189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2190
2191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2193 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2194 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2195
2196 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2197 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2198
2199 IEM_MC_ADVANCE_RIP_AND_FINISH();
2200 IEM_MC_END();
2201 }
2202}
2203
2204
2205/**
2206 * @opcode 0x10
2207 * @oppfx 0xf2
2208 * @opcpuid sse2
2209 * @opgroup og_sse2_pcksclr_datamove
2210 * @opxcpttype 5
2211 * @optest op1=1 op2=2 -> op1=2
2212 * @optest op1=0 op2=-42 -> op1=-42
2213 */
2214FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2215{
2216 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2218 if (IEM_IS_MODRM_REG_MODE(bRm))
2219 {
2220 /*
2221 * XMM64, XMM64.
2222 */
2223 IEM_MC_BEGIN(0, 0);
2224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2225 IEM_MC_LOCAL(uint64_t, uSrc);
2226
2227 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2229 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2230 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2231
2232 IEM_MC_ADVANCE_RIP_AND_FINISH();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 /*
2238 * XMM128, [mem64].
2239 */
2240 IEM_MC_BEGIN(0, 0);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2243
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2246 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2248
2249 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2250 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2251
2252 IEM_MC_ADVANCE_RIP_AND_FINISH();
2253 IEM_MC_END();
2254 }
2255}
2256
2257
2258/**
2259 * @opcode 0x11
2260 * @oppfx none
2261 * @opcpuid sse
2262 * @opgroup og_sse_simdfp_datamove
2263 * @opxcpttype 4UA
2264 * @optest op1=1 op2=2 -> op1=2
2265 * @optest op1=0 op2=-42 -> op1=-42
2266 */
2267FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2268{
2269 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2271 if (IEM_IS_MODRM_REG_MODE(bRm))
2272 {
2273 /*
2274 * XMM128, XMM128.
2275 */
2276 IEM_MC_BEGIN(0, 0);
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2278 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2279 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2280 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2281 IEM_GET_MODRM_REG(pVCpu, bRm));
2282 IEM_MC_ADVANCE_RIP_AND_FINISH();
2283 IEM_MC_END();
2284 }
2285 else
2286 {
2287 /*
2288 * [mem128], XMM128.
2289 */
2290 IEM_MC_BEGIN(0, 0);
2291 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2293
2294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2298
2299 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2300 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2301
2302 IEM_MC_ADVANCE_RIP_AND_FINISH();
2303 IEM_MC_END();
2304 }
2305}
2306
2307
2308/**
2309 * @opcode 0x11
2310 * @oppfx 0x66
2311 * @opcpuid sse2
2312 * @opgroup og_sse2_pcksclr_datamove
2313 * @opxcpttype 4UA
2314 * @optest op1=1 op2=2 -> op1=2
2315 * @optest op1=0 op2=-42 -> op1=-42
2316 */
2317FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2318{
2319 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2321 if (IEM_IS_MODRM_REG_MODE(bRm))
2322 {
2323 /*
2324 * XMM128, XMM128.
2325 */
2326 IEM_MC_BEGIN(0, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2331 IEM_GET_MODRM_REG(pVCpu, bRm));
2332 IEM_MC_ADVANCE_RIP_AND_FINISH();
2333 IEM_MC_END();
2334 }
2335 else
2336 {
2337 /*
2338 * [mem128], XMM128.
2339 */
2340 IEM_MC_BEGIN(0, 0);
2341 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343
2344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2348
2349 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2350 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2351
2352 IEM_MC_ADVANCE_RIP_AND_FINISH();
2353 IEM_MC_END();
2354 }
2355}
2356
2357
2358/**
2359 * @opcode 0x11
2360 * @oppfx 0xf3
2361 * @opcpuid sse
2362 * @opgroup og_sse_simdfp_datamove
2363 * @opxcpttype 5
2364 * @optest op1=1 op2=2 -> op1=2
2365 * @optest op1=0 op2=-22 -> op1=-22
2366 */
2367FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2368{
2369 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2371 if (IEM_IS_MODRM_REG_MODE(bRm))
2372 {
2373 /*
2374 * XMM32, XMM32.
2375 */
2376 IEM_MC_BEGIN(0, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2378 IEM_MC_LOCAL(uint32_t, uSrc);
2379
2380 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2381 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2382 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2383 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2384
2385 IEM_MC_ADVANCE_RIP_AND_FINISH();
2386 IEM_MC_END();
2387 }
2388 else
2389 {
2390 /*
2391 * [mem32], XMM32.
2392 */
2393 IEM_MC_BEGIN(0, 0);
2394 IEM_MC_LOCAL(uint32_t, uSrc);
2395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2396
2397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2401
2402 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2403 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2404
2405 IEM_MC_ADVANCE_RIP_AND_FINISH();
2406 IEM_MC_END();
2407 }
2408}
2409
2410
2411/**
2412 * @opcode 0x11
2413 * @oppfx 0xf2
2414 * @opcpuid sse2
2415 * @opgroup og_sse2_pcksclr_datamove
2416 * @opxcpttype 5
2417 * @optest op1=1 op2=2 -> op1=2
2418 * @optest op1=0 op2=-42 -> op1=-42
2419 */
2420FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2421{
2422 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2424 if (IEM_IS_MODRM_REG_MODE(bRm))
2425 {
2426 /*
2427 * XMM64, XMM64.
2428 */
2429 IEM_MC_BEGIN(0, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2431 IEM_MC_LOCAL(uint64_t, uSrc);
2432
2433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2435 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2436 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2437
2438 IEM_MC_ADVANCE_RIP_AND_FINISH();
2439 IEM_MC_END();
2440 }
2441 else
2442 {
2443 /*
2444 * [mem64], XMM64.
2445 */
2446 IEM_MC_BEGIN(0, 0);
2447 IEM_MC_LOCAL(uint64_t, uSrc);
2448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2449
2450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2452 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2453 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2454
2455 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2456 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2457
2458 IEM_MC_ADVANCE_RIP_AND_FINISH();
2459 IEM_MC_END();
2460 }
2461}
2462
2463
2464FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2465{
2466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2467 if (IEM_IS_MODRM_REG_MODE(bRm))
2468 {
2469 /**
2470 * @opcode 0x12
2471 * @opcodesub 11 mr/reg
2472 * @oppfx none
2473 * @opcpuid sse
2474 * @opgroup og_sse_simdfp_datamove
2475 * @opxcpttype 5
2476 * @optest op1=1 op2=2 -> op1=2
2477 * @optest op1=0 op2=-42 -> op1=-42
2478 */
2479 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2480
2481 IEM_MC_BEGIN(0, 0);
2482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2483 IEM_MC_LOCAL(uint64_t, uSrc);
2484
2485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
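 /* movhlps: dst[63:0] = src[127:64], leaving dst[127:64] unchanged. */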
2487 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2488 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2489
2490 IEM_MC_ADVANCE_RIP_AND_FINISH();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /**
2496 * @opdone
2497 * @opcode 0x12
2498 * @opcodesub !11 mr/reg
2499 * @oppfx none
2500 * @opcpuid sse
2501 * @opgroup og_sse_simdfp_datamove
2502 * @opxcpttype 5
2503 * @optest op1=1 op2=2 -> op1=2
2504 * @optest op1=0 op2=-42 -> op1=-42
2505 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2506 */
2507 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2508
2509 IEM_MC_BEGIN(0, 0);
2510 IEM_MC_LOCAL(uint64_t, uSrc);
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2517
2518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2520
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524}
2525
2526
2527/**
2528 * @opcode 0x12
2529 * @opcodesub !11 mr/reg
2530 * @oppfx 0x66
2531 * @opcpuid sse2
2532 * @opgroup og_sse2_pcksclr_datamove
2533 * @opxcpttype 5
2534 * @optest op1=1 op2=2 -> op1=2
2535 * @optest op1=0 op2=-42 -> op1=-42
2536 */
2537FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2538{
2539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2540 if (IEM_IS_MODRM_MEM_MODE(bRm))
2541 {
2542 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2543
2544 IEM_MC_BEGIN(0, 0);
2545 IEM_MC_LOCAL(uint64_t, uSrc);
2546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2547
2548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2552
2553 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2554 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2555
2556 IEM_MC_ADVANCE_RIP_AND_FINISH();
2557 IEM_MC_END();
2558 }
2559
2560 /**
2561 * @opdone
2562 * @opmnemonic ud660f12m3
2563 * @opcode 0x12
2564 * @opcodesub 11 mr/reg
2565 * @oppfx 0x66
2566 * @opunused immediate
2567 * @opcpuid sse
2568 * @optest ->
2569 */
2570 else
2571 IEMOP_RAISE_INVALID_OPCODE_RET();
2572}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @oppfx 0xf3
2578 * @opcpuid sse3
2579 * @opgroup og_sse3_pcksclr_datamove
2580 * @opxcpttype 4
2581 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2582 * op1=0x00000002000000020000000100000001
2583 */
2584FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2585{
2586 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_REG_MODE(bRm))
2589 {
2590 /*
2591 * XMM, XMM.
2592 */
2593 IEM_MC_BEGIN(0, 0);
2594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2595 IEM_MC_LOCAL(RTUINT128U, uSrc);
2596
2597 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2598 IEM_MC_PREPARE_SSE_USAGE();
2599
2600 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
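 /* Duplicate the even dwords: result = { src0, src0, src2, src2 }. */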
2601 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2602 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2605
2606 IEM_MC_ADVANCE_RIP_AND_FINISH();
2607 IEM_MC_END();
2608 }
2609 else
2610 {
2611 /*
2612 * XMM, [mem128].
2613 */
2614 IEM_MC_BEGIN(0, 0);
2615 IEM_MC_LOCAL(RTUINT128U, uSrc);
2616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2617
2618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2620 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2621 IEM_MC_PREPARE_SSE_USAGE();
2622
2623 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2624 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2625 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2626 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2627 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2628
2629 IEM_MC_ADVANCE_RIP_AND_FINISH();
2630 IEM_MC_END();
2631 }
2632}
2633
2634
2635/**
2636 * @opcode 0x12
2637 * @oppfx 0xf2
2638 * @opcpuid sse3
2639 * @opgroup og_sse3_pcksclr_datamove
2640 * @opxcpttype 5
2641 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2642 * op1=0x22222222111111112222222211111111
2643 */
2644FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2645{
2646 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2648 if (IEM_IS_MODRM_REG_MODE(bRm))
2649 {
2650 /*
2651 * XMM128, XMM64.
2652 */
2653 IEM_MC_BEGIN(0, 0);
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2655 IEM_MC_LOCAL(uint64_t, uSrc);
2656
2657 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2658 IEM_MC_PREPARE_SSE_USAGE();
2659
2660 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
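 /* Broadcast the low qword into both halves of the destination. */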
2661 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2662 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2663
2664 IEM_MC_ADVANCE_RIP_AND_FINISH();
2665 IEM_MC_END();
2666 }
2667 else
2668 {
2669 /*
2670 * XMM128, [mem64].
2671 */
2672 IEM_MC_BEGIN(0, 0);
2673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2674 IEM_MC_LOCAL(uint64_t, uSrc);
2675
2676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2678 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2679 IEM_MC_PREPARE_SSE_USAGE();
2680
2681 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2682 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2683 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2684
2685 IEM_MC_ADVANCE_RIP_AND_FINISH();
2686 IEM_MC_END();
2687 }
2688}
2689
2690
2691/**
2692 * @opcode 0x13
2693 * @opcodesub !11 mr/reg
2694 * @oppfx none
2695 * @opcpuid sse
2696 * @opgroup og_sse_simdfp_datamove
2697 * @opxcpttype 5
2698 * @optest op1=1 op2=2 -> op1=2
2699 * @optest op1=0 op2=-42 -> op1=-42
2700 */
2701FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2702{
2703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2704 if (IEM_IS_MODRM_MEM_MODE(bRm))
2705 {
2706 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2707
2708 IEM_MC_BEGIN(0, 0);
2709 IEM_MC_LOCAL(uint64_t, uSrc);
2710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2711
2712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2716
2717 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2719
2720 IEM_MC_ADVANCE_RIP_AND_FINISH();
2721 IEM_MC_END();
2722 }
2723
2724 /**
2725 * @opdone
2726 * @opmnemonic ud0f13m3
2727 * @opcode 0x13
2728 * @opcodesub 11 mr/reg
2729 * @oppfx none
2730 * @opunused immediate
2731 * @opcpuid sse
2732 * @optest ->
2733 */
2734 else
2735 IEMOP_RAISE_INVALID_OPCODE_RET();
2736}
2737
2738
2739/**
2740 * @opcode 0x13
2741 * @opcodesub !11 mr/reg
2742 * @oppfx 0x66
2743 * @opcpuid sse2
2744 * @opgroup og_sse2_pcksclr_datamove
2745 * @opxcpttype 5
2746 * @optest op1=1 op2=2 -> op1=2
2747 * @optest op1=0 op2=-42 -> op1=-42
2748 */
2749FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2750{
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2752 if (IEM_IS_MODRM_MEM_MODE(bRm))
2753 {
2754 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2755
2756 IEM_MC_BEGIN(0, 0);
2757 IEM_MC_LOCAL(uint64_t, uSrc);
2758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2759
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2764
2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2767
2768 IEM_MC_ADVANCE_RIP_AND_FINISH();
2769 IEM_MC_END();
2770 }
2771
2772 /**
2773 * @opdone
2774 * @opmnemonic ud660f13m3
2775 * @opcode 0x13
2776 * @opcodesub 11 mr/reg
2777 * @oppfx 0x66
2778 * @opunused immediate
2779 * @opcpuid sse
2780 * @optest ->
2781 */
2782 else
2783 IEMOP_RAISE_INVALID_OPCODE_RET();
2784}
2785
2786
2787/**
2788 * @opmnemonic udf30f13
2789 * @opcode 0x13
2790 * @oppfx 0xf3
2791 * @opunused intel-modrm
2792 * @opcpuid sse
2793 * @optest ->
2794 * @opdone
2795 */
2796
2797/**
2798 * @opmnemonic udf20f13
2799 * @opcode 0x13
2800 * @oppfx 0xf2
2801 * @opunused intel-modrm
2802 * @opcpuid sse
2803 * @optest ->
2804 * @opdone
2805 */
2806
2807/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2808FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2809{
2810 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2811 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2812}
2813
2814
2815/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2816FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2817{
2818 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2819 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2820}
2821
2822
2823/**
2824 * @opdone
2825 * @opmnemonic udf30f14
2826 * @opcode 0x14
2827 * @oppfx 0xf3
2828 * @opunused intel-modrm
2829 * @opcpuid sse
2830 * @optest ->
2831 * @opdone
2832 */
2833
2834/**
2835 * @opmnemonic udf20f14
2836 * @opcode 0x14
2837 * @oppfx 0xf2
2838 * @opunused intel-modrm
2839 * @opcpuid sse
2840 * @optest ->
2841 * @opdone
2842 */
2843
2844/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2845FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2846{
2847 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2848 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2849}
2850
2851
2852/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2853FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2854{
2855 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2856 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2857}
2858
2859
2860/* Opcode 0xf3 0x0f 0x15 - invalid */
2861/* Opcode 0xf2 0x0f 0x15 - invalid */
2862
2863/**
2864 * @opdone
2865 * @opmnemonic udf30f15
2866 * @opcode 0x15
2867 * @oppfx 0xf3
2868 * @opunused intel-modrm
2869 * @opcpuid sse
2870 * @optest ->
2871 * @opdone
2872 */
2873
2874/**
2875 * @opmnemonic udf20f15
2876 * @opcode 0x15
2877 * @oppfx 0xf2
2878 * @opunused intel-modrm
2879 * @opcpuid sse
2880 * @optest ->
2881 * @opdone
2882 */
2883
2884FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2885{
2886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2887 if (IEM_IS_MODRM_REG_MODE(bRm))
2888 {
2889 /**
2890 * @opcode 0x16
2891 * @opcodesub 11 mr/reg
2892 * @oppfx none
2893 * @opcpuid sse
2894 * @opgroup og_sse_simdfp_datamove
2895 * @opxcpttype 5
2896 * @optest op1=1 op2=2 -> op1=2
2897 * @optest op1=0 op2=-42 -> op1=-42
2898 */
2899 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2900
2901 IEM_MC_BEGIN(0, 0);
2902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2903 IEM_MC_LOCAL(uint64_t, uSrc);
2904
2905 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2906 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
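 /* movlhps: dst[127:64] = src[63:0], leaving dst[63:0] unchanged. */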
2907 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2908 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2909
2910 IEM_MC_ADVANCE_RIP_AND_FINISH();
2911 IEM_MC_END();
2912 }
2913 else
2914 {
2915 /**
2916 * @opdone
2917 * @opcode 0x16
2918 * @opcodesub !11 mr/reg
2919 * @oppfx none
2920 * @opcpuid sse
2921 * @opgroup og_sse_simdfp_datamove
2922 * @opxcpttype 5
2923 * @optest op1=1 op2=2 -> op1=2
2924 * @optest op1=0 op2=-42 -> op1=-42
2925 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2926 */
2927 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2928
2929 IEM_MC_BEGIN(0, 0);
2930 IEM_MC_LOCAL(uint64_t, uSrc);
2931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2932
2933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2935 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2936 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2937
2938 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2939 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2940
2941 IEM_MC_ADVANCE_RIP_AND_FINISH();
2942 IEM_MC_END();
2943 }
2944}
2945
2946
2947/**
2948 * @opcode 0x16
2949 * @opcodesub !11 mr/reg
2950 * @oppfx 0x66
2951 * @opcpuid sse2
2952 * @opgroup og_sse2_pcksclr_datamove
2953 * @opxcpttype 5
2954 * @optest op1=1 op2=2 -> op1=2
2955 * @optest op1=0 op2=-42 -> op1=-42
2956 */
2957FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if (IEM_IS_MODRM_MEM_MODE(bRm))
2961 {
2962 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2963
2964 IEM_MC_BEGIN(0, 0);
2965 IEM_MC_LOCAL(uint64_t, uSrc);
2966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2967
2968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2970 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2971 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2972
2973 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2974 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2975
2976 IEM_MC_ADVANCE_RIP_AND_FINISH();
2977 IEM_MC_END();
2978 }
2979
2980 /**
2981 * @opdone
2982 * @opmnemonic ud660f16m3
2983 * @opcode 0x16
2984 * @opcodesub 11 mr/reg
2985 * @oppfx 0x66
2986 * @opunused immediate
2987 * @opcpuid sse
2988 * @optest ->
2989 */
2990 else
2991 IEMOP_RAISE_INVALID_OPCODE_RET();
2992}
2993
2994
2995/**
2996 * @opcode 0x16
2997 * @oppfx 0xf3
2998 * @opcpuid sse3
2999 * @opgroup og_sse3_pcksclr_datamove
3000 * @opxcpttype 4
3001 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3002 * op1=0x00000002000000020000000100000001
3003 */
3004FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3005{
3006 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3008 if (IEM_IS_MODRM_REG_MODE(bRm))
3009 {
3010 /*
3011 * XMM128, XMM128.
3012 */
3013 IEM_MC_BEGIN(0, 0);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3015 IEM_MC_LOCAL(RTUINT128U, uSrc);
3016
3017 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3018 IEM_MC_PREPARE_SSE_USAGE();
3019
3020 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
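 /* Duplicate the odd dwords: result = { src1, src1, src3, src3 }. */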
3021 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3022 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3023 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3024 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3025
3026 IEM_MC_ADVANCE_RIP_AND_FINISH();
3027 IEM_MC_END();
3028 }
3029 else
3030 {
3031 /*
3032 * XMM128, [mem128].
3033 */
3034 IEM_MC_BEGIN(0, 0);
3035 IEM_MC_LOCAL(RTUINT128U, uSrc);
3036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3037
3038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3041 IEM_MC_PREPARE_SSE_USAGE();
3042
3043 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3044 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3045 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3046 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3047 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3048
3049 IEM_MC_ADVANCE_RIP_AND_FINISH();
3050 IEM_MC_END();
3051 }
3052}
3053
3054/**
3055 * @opdone
3056 * @opmnemonic udf20f16
3057 * @opcode 0x16
3058 * @oppfx 0xf2
3059 * @opunused intel-modrm
3060 * @opcpuid sse
3061 * @optest ->
3062 * @opdone
3063 */
3064
3065
3066/**
3067 * @opcode 0x17
3068 * @opcodesub !11 mr/reg
3069 * @oppfx none
3070 * @opcpuid sse
3071 * @opgroup og_sse_simdfp_datamove
3072 * @opxcpttype 5
3073 * @optest op1=1 op2=2 -> op1=2
3074 * @optest op1=0 op2=-42 -> op1=-42
3075 */
3076FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3077{
3078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3079 if (IEM_IS_MODRM_MEM_MODE(bRm))
3080 {
3081 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3082
3083 IEM_MC_BEGIN(0, 0);
3084 IEM_MC_LOCAL(uint64_t, uSrc);
3085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3086
3087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3089 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3091
3092 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3093 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3094
3095 IEM_MC_ADVANCE_RIP_AND_FINISH();
3096 IEM_MC_END();
3097 }
3098
3099 /**
3100 * @opdone
3101 * @opmnemonic ud0f17m3
3102 * @opcode 0x17
3103 * @opcodesub 11 mr/reg
3104 * @oppfx none
3105 * @opunused immediate
3106 * @opcpuid sse
3107 * @optest ->
3108 */
3109 else
3110 IEMOP_RAISE_INVALID_OPCODE_RET();
3111}
3112
3113
3114/**
3115 * @opcode 0x17
3116 * @opcodesub !11 mr/reg
3117 * @oppfx 0x66
3118 * @opcpuid sse2
3119 * @opgroup og_sse2_pcksclr_datamove
3120 * @opxcpttype 5
3121 * @optest op1=1 op2=2 -> op1=2
3122 * @optest op1=0 op2=-42 -> op1=-42
3123 */
3124FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3125{
3126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3127 if (IEM_IS_MODRM_MEM_MODE(bRm))
3128 {
3129 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3130
3131 IEM_MC_BEGIN(0, 0);
3132 IEM_MC_LOCAL(uint64_t, uSrc);
3133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3134
3135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3137 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3139
3140 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3141 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3142
3143 IEM_MC_ADVANCE_RIP_AND_FINISH();
3144 IEM_MC_END();
3145 }
3146
3147 /**
3148 * @opdone
3149 * @opmnemonic ud660f17m3
3150 * @opcode 0x17
3151 * @opcodesub 11 mr/reg
3152 * @oppfx 0x66
3153 * @opunused immediate
3154 * @opcpuid sse
3155 * @optest ->
3156 */
3157 else
3158 IEMOP_RAISE_INVALID_OPCODE_RET();
3159}
3160
3161
3162/**
3163 * @opdone
3164 * @opmnemonic udf30f17
3165 * @opcode 0x17
3166 * @oppfx 0xf3
3167 * @opunused intel-modrm
3168 * @opcpuid sse
3169 * @optest ->
3170 * @opdone
3171 */
3172
3173/**
3174 * @opmnemonic udf20f17
3175 * @opcode 0x17
3176 * @oppfx 0xf2
3177 * @opunused intel-modrm
3178 * @opcpuid sse
3179 * @optest ->
3180 * @opdone
3181 */
3182
3183
3184/** Opcode 0x0f 0x18. */
3185FNIEMOP_DEF(iemOp_prefetch_Grp16)
3186{
3187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3188 if (IEM_IS_MODRM_MEM_MODE(bRm))
3189 {
3190 switch (IEM_GET_MODRM_REG_8(bRm))
3191 {
3192 case 4: /* Aliased to /0 for the time being according to AMD. */
3193 case 5: /* Aliased to /0 for the time being according to AMD. */
3194 case 6: /* Aliased to /0 for the time being according to AMD. */
3195 case 7: /* Aliased to /0 for the time being according to AMD. */
3196 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3197 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3198 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3199 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3201 }
3202
3203 IEM_MC_BEGIN(0, 0);
3204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 /* Currently a NOP. */
3208 IEM_MC_NOREF(GCPtrEffSrc);
3209 IEM_MC_ADVANCE_RIP_AND_FINISH();
3210 IEM_MC_END();
3211 }
3212 else
3213 IEMOP_RAISE_INVALID_OPCODE_RET();
3214}
3215
3216
3217/** Opcode 0x0f 0x19..0x1f. */
3218FNIEMOP_DEF(iemOp_nop_Ev)
3219{
3220 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 IEM_MC_BEGIN(0, 0);
3225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3226 IEM_MC_ADVANCE_RIP_AND_FINISH();
3227 IEM_MC_END();
3228 }
3229 else
3230 {
3231 IEM_MC_BEGIN(0, 0);
3232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235 /* Currently a NOP. */
3236 IEM_MC_NOREF(GCPtrEffSrc);
3237 IEM_MC_ADVANCE_RIP_AND_FINISH();
3238 IEM_MC_END();
3239 }
3240}
3241
3242
3243/** Opcode 0x0f 0x20. */
3244FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3245{
3246 /* mod is ignored, as are operand-size overrides. */
3247/** @todo testcase: check memory encoding. */
3248 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3249 IEMOP_HLP_MIN_386();
3250 if (IEM_IS_64BIT_CODE(pVCpu))
3251 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3252 else
3253 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3254
3255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3256 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3257 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3258 {
3259 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3260 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3261 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3262 iCrReg |= 8;
3263 }
3264 switch (iCrReg)
3265 {
3266 case 0: case 2: case 3: case 4: case 8:
3267 break;
3268 default:
3269 IEMOP_RAISE_INVALID_OPCODE_RET();
3270 }
3271 IEMOP_HLP_DONE_DECODING();
3272
3273 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3274 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3275 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3276}
3277
3278
3279/** Opcode 0x0f 0x21. */
3280FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3281{
3282/** @todo testcase: check memory encoding. */
3283 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3284 IEMOP_HLP_MIN_386();
3285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3287 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3288 IEMOP_RAISE_INVALID_OPCODE_RET();
3289 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3290 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3291 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3292}
3293
3294
3295/** Opcode 0x0f 0x22. */
3296FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3297{
3298 /* mod is ignored, as are operand-size overrides. */
3299 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3300 IEMOP_HLP_MIN_386();
3301 if (IEM_IS_64BIT_CODE(pVCpu))
3302 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3303 else
3304 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3305
3306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3307 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3308 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3309 {
3310 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3311 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3312 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3313 iCrReg |= 8;
3314 }
3315 switch (iCrReg)
3316 {
3317 case 0: case 2: case 3: case 4: case 8:
3318 break;
3319 default:
3320 IEMOP_RAISE_INVALID_OPCODE_RET();
3321 }
3322 IEMOP_HLP_DONE_DECODING();
3323
3324 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3325 if (iCrReg & (2 | 8))
3326 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3327 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3328 else
3329 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3330 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3331}
3332
3333
3334/** Opcode 0x0f 0x23. */
3335FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3336{
3337 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3338 IEMOP_HLP_MIN_386();
3339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3341 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3342 IEMOP_RAISE_INVALID_OPCODE_RET();
3343 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3344 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3345}
3346
3347
3348/** Opcode 0x0f 0x24. */
3349FNIEMOP_DEF(iemOp_mov_Rd_Td)
3350{
3351 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3352 IEMOP_HLP_MIN_386();
3353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
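 /* The 386/486 test registers were dropped with the Pentium, so this
    decodes as invalid opcode on Pentium and later. */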
3355 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3356 IEMOP_RAISE_INVALID_OPCODE_RET();
3357 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3358 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3359}
3360
3361
3362/** Opcode 0x0f 0x26. */
3363FNIEMOP_DEF(iemOp_mov_Td_Rd)
3364{
3365 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3366 IEMOP_HLP_MIN_386();
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3369 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3370 IEMOP_RAISE_INVALID_OPCODE_RET();
3371 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3372}
3373
3374
3375/**
3376 * @opcode 0x28
3377 * @oppfx none
3378 * @opcpuid sse
3379 * @opgroup og_sse_simdfp_datamove
3380 * @opxcpttype 1
3381 * @optest op1=1 op2=2 -> op1=2
3382 * @optest op1=0 op2=-42 -> op1=-42
3383 */
3384FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3385{
3386 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3388 if (IEM_IS_MODRM_REG_MODE(bRm))
3389 {
3390 /*
3391 * Register, register.
3392 */
3393 IEM_MC_BEGIN(0, 0);
3394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3395 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3396 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3397 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3398 IEM_GET_MODRM_RM(pVCpu, bRm));
3399 IEM_MC_ADVANCE_RIP_AND_FINISH();
3400 IEM_MC_END();
3401 }
3402 else
3403 {
3404 /*
3405 * Register, memory.
3406 */
3407 IEM_MC_BEGIN(0, 0);
3408 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3410
3411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3415
3416 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3417 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3418
3419 IEM_MC_ADVANCE_RIP_AND_FINISH();
3420 IEM_MC_END();
3421 }
3422}
3423
3424/**
3425 * @opcode 0x28
3426 * @oppfx 66
3427 * @opcpuid sse2
3428 * @opgroup og_sse2_pcksclr_datamove
3429 * @opxcpttype 1
3430 * @optest op1=1 op2=2 -> op1=2
3431 * @optest op1=0 op2=-42 -> op1=-42
3432 */
3433FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3434{
3435 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3437 if (IEM_IS_MODRM_REG_MODE(bRm))
3438 {
3439 /*
3440 * Register, register.
3441 */
3442 IEM_MC_BEGIN(0, 0);
3443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3444 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3446 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3447 IEM_GET_MODRM_RM(pVCpu, bRm));
3448 IEM_MC_ADVANCE_RIP_AND_FINISH();
3449 IEM_MC_END();
3450 }
3451 else
3452 {
3453 /*
3454 * Register, memory.
3455 */
3456 IEM_MC_BEGIN(0, 0);
3457 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3462 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3464
3465 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3466 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3467
3468 IEM_MC_ADVANCE_RIP_AND_FINISH();
3469 IEM_MC_END();
3470 }
3471}
3472
3473/* Opcode 0xf3 0x0f 0x28 - invalid */
3474/* Opcode 0xf2 0x0f 0x28 - invalid */
3475
3476/**
3477 * @opcode 0x29
3478 * @oppfx none
3479 * @opcpuid sse
3480 * @opgroup og_sse_simdfp_datamove
3481 * @opxcpttype 1
3482 * @optest op1=1 op2=2 -> op1=2
3483 * @optest op1=0 op2=-42 -> op1=-42
3484 */
3485FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3486{
3487 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3489 if (IEM_IS_MODRM_REG_MODE(bRm))
3490 {
3491 /*
3492 * Register, register.
3493 */
3494 IEM_MC_BEGIN(0, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3496 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3498 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3499 IEM_GET_MODRM_REG(pVCpu, bRm));
3500 IEM_MC_ADVANCE_RIP_AND_FINISH();
3501 IEM_MC_END();
3502 }
3503 else
3504 {
3505 /*
3506 * Memory, register.
3507 */
3508 IEM_MC_BEGIN(0, 0);
3509 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3511
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3516
3517 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3518 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3519
3520 IEM_MC_ADVANCE_RIP_AND_FINISH();
3521 IEM_MC_END();
3522 }
3523}
3524
3525/**
3526 * @opcode 0x29
3527 * @oppfx 66
3528 * @opcpuid sse2
3529 * @opgroup og_sse2_pcksclr_datamove
3530 * @opxcpttype 1
3531 * @optest op1=1 op2=2 -> op1=2
3532 * @optest op1=0 op2=-42 -> op1=-42
3533 */
3534FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3535{
3536 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3538 if (IEM_IS_MODRM_REG_MODE(bRm))
3539 {
3540 /*
3541 * Register, register.
3542 */
3543 IEM_MC_BEGIN(0, 0);
3544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3547 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3548 IEM_GET_MODRM_REG(pVCpu, bRm));
3549 IEM_MC_ADVANCE_RIP_AND_FINISH();
3550 IEM_MC_END();
3551 }
3552 else
3553 {
3554 /*
3555 * Memory, register.
3556 */
3557 IEM_MC_BEGIN(0, 0);
3558 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3560
3561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3565
3566 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3567 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3568
3569 IEM_MC_ADVANCE_RIP_AND_FINISH();
3570 IEM_MC_END();
3571 }
3572}
3573
3574/* Opcode 0xf3 0x0f 0x29 - invalid */
3575/* Opcode 0xf2 0x0f 0x29 - invalid */
3576
3577
3578/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3579FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3580{
3581 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3583 if (IEM_IS_MODRM_REG_MODE(bRm))
3584 {
3585 /*
3586 * XMM, MMX
3587 */
3588 IEM_MC_BEGIN(0, 0);
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3590 IEM_MC_LOCAL(X86XMMREG, Dst);
3591 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3592 IEM_MC_ARG(uint64_t, u64Src, 1);
3593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3594 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3595 IEM_MC_PREPARE_FPU_USAGE();
3596 IEM_MC_FPU_TO_MMX_MODE();
3597
3598 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3599 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3600
3601 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3602 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3603
3604 IEM_MC_ADVANCE_RIP_AND_FINISH();
3605 IEM_MC_END();
3606 }
3607 else
3608 {
3609 /*
3610 * XMM, [mem64]
3611 */
3612 IEM_MC_BEGIN(0, 0);
3613 IEM_MC_LOCAL(X86XMMREG, Dst);
3614 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3615 IEM_MC_ARG(uint64_t, u64Src, 1);
3616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3617
3618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3620 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3621 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3622 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3623
3624 IEM_MC_PREPARE_FPU_USAGE();
3625 IEM_MC_FPU_TO_MMX_MODE();
3626 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3627 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3628 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3629
3630 IEM_MC_ADVANCE_RIP_AND_FINISH();
3631 IEM_MC_END();
3632 }
3633}
3634
3635
3636/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3637FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3638{
3639 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3641 if (IEM_IS_MODRM_REG_MODE(bRm))
3642 {
3643 /*
3644 * XMM, MMX
3645 */
3646 IEM_MC_BEGIN(0, 0);
3647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3648 IEM_MC_LOCAL(X86XMMREG, Dst);
3649 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3650 IEM_MC_ARG(uint64_t, u64Src, 1);
3651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3652 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3653 IEM_MC_PREPARE_FPU_USAGE();
3654 IEM_MC_FPU_TO_MMX_MODE();
3655
3656 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3657
3658 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3659 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3660
3661 IEM_MC_ADVANCE_RIP_AND_FINISH();
3662 IEM_MC_END();
3663 }
3664 else
3665 {
3666 /*
3667 * XMM, [mem64]
3668 */
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_LOCAL(X86XMMREG, Dst);
3671 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3672 IEM_MC_ARG(uint64_t, u64Src, 1);
3673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3674
3675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3677 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3678 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3679 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3680
3681 /* Doesn't cause a transition to MMX mode. */
3682 IEM_MC_PREPARE_SSE_USAGE();
3683
3684 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3685 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3686
3687 IEM_MC_ADVANCE_RIP_AND_FINISH();
3688 IEM_MC_END();
3689 }
3690}
3691
3692
3693/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3694FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3695{
3696 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3697
3698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3699 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3700 {
3701 if (IEM_IS_MODRM_REG_MODE(bRm))
3702 {
3703 /* XMM, greg64 */
3704 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3705 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3706 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3707 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3708
3709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3710 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3711 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3712
3713 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3714 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3715 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3716
3717 IEM_MC_ADVANCE_RIP_AND_FINISH();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /* XMM, [mem64] */
3723 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3725 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3726 IEM_MC_LOCAL(int64_t, i64Src);
3727 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3728 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3729
3730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3732 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3733 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3734
3735 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3736 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3737 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3738
3739 IEM_MC_ADVANCE_RIP_AND_FINISH();
3740 IEM_MC_END();
3741 }
3742 }
3743 else
3744 {
3745 if (IEM_IS_MODRM_REG_MODE(bRm))
3746 {
3747 /* XMM, greg32 */
3748 IEM_MC_BEGIN(0, 0);
3749 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3750 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3751 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3752
3753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3754 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3755 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3756
3757 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3758 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3759 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3760
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 IEM_MC_END();
3763 }
3764 else
3765 {
3766 /* XMM, [mem32] */
3767 IEM_MC_BEGIN(0, 0);
3768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3769 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3770 IEM_MC_LOCAL(int32_t, i32Src);
3771 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3772 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3773
3774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3776 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3777 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3778
3779 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3780 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3781 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3782
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 IEM_MC_END();
3785 }
3786 }
3787}
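/* Note: with REX.W the source is a 64-bit GPR or mem64, otherwise 32-bit; in
   all four cases only the low dword of the destination XMM register is
   written (bits 127:32 are left unchanged, hence IEM_MC_STORE_XREG_R32).
   The same pattern repeats for cvtsi2sd below, which writes the low qword. */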
3788
3789
3790/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3791FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3792{
3793 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3794
3795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3796 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3797 {
3798 if (IEM_IS_MODRM_REG_MODE(bRm))
3799 {
3800 /* XMM, greg64 */
3801 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3802 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3803 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3804 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3805
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3808 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3809
3810 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3811 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3812 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3813
3814 IEM_MC_ADVANCE_RIP_AND_FINISH();
3815 IEM_MC_END();
3816 }
3817 else
3818 {
3819 /* XMM, [mem64] */
3820 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3822 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3823 IEM_MC_LOCAL(int64_t, i64Src);
3824 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3825 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3826
3827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3829 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3830 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3831
3832 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3833 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3834 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3835
3836 IEM_MC_ADVANCE_RIP_AND_FINISH();
3837 IEM_MC_END();
3838 }
3839 }
3840 else
3841 {
3842 if (IEM_IS_MODRM_REG_MODE(bRm))
3843 {
3844 /* XMM, greg32 */
3845 IEM_MC_BEGIN(0, 0);
3846 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3847 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3848 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3849
3850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3851 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3852 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3853
3854 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3855 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3856 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3857
3858 IEM_MC_ADVANCE_RIP_AND_FINISH();
3859 IEM_MC_END();
3860 }
3861 else
3862 {
3863 /* XMM, [mem32] */
3864 IEM_MC_BEGIN(0, 0);
3865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3866 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3867 IEM_MC_LOCAL(int32_t, i32Src);
3868 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3869 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3870
3871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3873 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3874 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3875
3876 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3877 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3878 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3879
3880 IEM_MC_ADVANCE_RIP_AND_FINISH();
3881 IEM_MC_END();
3882 }
3883 }
3884}
3885
3886
3887/**
3888 * @opcode 0x2b
3889 * @opcodesub !11 mr/reg
3890 * @oppfx none
3891 * @opcpuid sse
3892 * @opgroup og_sse1_cachect
3893 * @opxcpttype 1
3894 * @optest op1=1 op2=2 -> op1=2
3895 * @optest op1=0 op2=-42 -> op1=-42
3896 */
3897FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3898{
3899 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3900 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3901 if (IEM_IS_MODRM_MEM_MODE(bRm))
3902 {
3903 /*
3904 * Memory, register.
3905 */
3906 IEM_MC_BEGIN(0, 0);
3907 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3909
3910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3913 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3914
3915 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3916 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3917
3918 IEM_MC_ADVANCE_RIP_AND_FINISH();
3919 IEM_MC_END();
3920 }
3921 /* The register, register encoding is invalid. */
3922 else
3923 IEMOP_RAISE_INVALID_OPCODE_RET();
3924}
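/* Note: the non-temporal hint itself is not modeled; the store above is a
   plain aligned 128-bit store just like movaps, as the cache-bypass hint has
   no architecturally visible state to emulate here. */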
3925
3926/**
3927 * @opcode 0x2b
3928 * @opcodesub !11 mr/reg
3929 * @oppfx 0x66
3930 * @opcpuid sse2
3931 * @opgroup og_sse2_cachect
3932 * @opxcpttype 1
3933 * @optest op1=1 op2=2 -> op1=2
3934 * @optest op1=0 op2=-42 -> op1=-42
3935 */
3936FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3937{
3938 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3940 if (IEM_IS_MODRM_MEM_MODE(bRm))
3941 {
3942 /*
3943 * Memory, register.
3944 */
3945 IEM_MC_BEGIN(0, 0);
3946 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3948
3949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3951 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3952 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3953
3954 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3955 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3956
3957 IEM_MC_ADVANCE_RIP_AND_FINISH();
3958 IEM_MC_END();
3959 }
3960 /* The register, register encoding is invalid. */
3961 else
3962 IEMOP_RAISE_INVALID_OPCODE_RET();
3963}
3964/* Opcode 0xf3 0x0f 0x2b - invalid */
3965/* Opcode 0xf2 0x0f 0x2b - invalid */
3966
3967
3968/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3969FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3970{
3971 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3973 if (IEM_IS_MODRM_REG_MODE(bRm))
3974 {
3975 /*
3976 * Register, register.
3977 */
3978 IEM_MC_BEGIN(0, 0);
3979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3980 IEM_MC_LOCAL(uint64_t, u64Dst);
3981 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3982 IEM_MC_ARG(uint64_t, u64Src, 1);
3983 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3984 IEM_MC_PREPARE_FPU_USAGE();
3985 IEM_MC_FPU_TO_MMX_MODE();
3986
3987 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3988
3989 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3990 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3991
3992 IEM_MC_ADVANCE_RIP_AND_FINISH();
3993 IEM_MC_END();
3994 }
3995 else
3996 {
3997 /*
3998 * Register, memory.
3999 */
4000 IEM_MC_BEGIN(0, 0);
4001 IEM_MC_LOCAL(uint64_t, u64Dst);
4002 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4003 IEM_MC_ARG(uint64_t, u64Src, 1);
4004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4005
4006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4008 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4009 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4010
4011 IEM_MC_PREPARE_FPU_USAGE();
4012 IEM_MC_FPU_TO_MMX_MODE();
4013
4014 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
4015 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4016
4017 IEM_MC_ADVANCE_RIP_AND_FINISH();
4018 IEM_MC_END();
4019 }
4020}
4021
4022
4023/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4024FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4025{
4026 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4028 if (IEM_IS_MODRM_REG_MODE(bRm))
4029 {
4030 /*
4031 * Register, register.
4032 */
4033 IEM_MC_BEGIN(0, 0);
4034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4035 IEM_MC_LOCAL(uint64_t, u64Dst);
4036 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4037 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4038 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4039 IEM_MC_PREPARE_FPU_USAGE();
4040 IEM_MC_FPU_TO_MMX_MODE();
4041
4042 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4043
4044 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4045 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4046
4047 IEM_MC_ADVANCE_RIP_AND_FINISH();
4048 IEM_MC_END();
4049 }
4050 else
4051 {
4052 /*
4053 * Register, memory.
4054 */
4055 IEM_MC_BEGIN(0, 0);
4056 IEM_MC_LOCAL(uint64_t, u64Dst);
4057 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4058 IEM_MC_LOCAL(X86XMMREG, uSrc);
4059 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4061
4062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4065 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4066
4067 IEM_MC_PREPARE_FPU_USAGE();
4068 IEM_MC_FPU_TO_MMX_MODE();
4069
4070 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4071 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4072
4073 IEM_MC_ADVANCE_RIP_AND_FINISH();
4074 IEM_MC_END();
4075 }
4076}
4077
4078
4079/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4080FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4081{
4082 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4083
4084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4086 {
4087 if (IEM_IS_MODRM_REG_MODE(bRm))
4088 {
4089 /* greg64, XMM */
4090 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4091 IEM_MC_LOCAL(int64_t, i64Dst);
4092 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4093 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4094
4095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4096 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4097 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4098
4099 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4100 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4101 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4102
4103 IEM_MC_ADVANCE_RIP_AND_FINISH();
4104 IEM_MC_END();
4105 }
4106 else
4107 {
4108 /* greg64, [mem32] */
4109 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4111 IEM_MC_LOCAL(int64_t, i64Dst);
4112 IEM_MC_LOCAL(uint32_t, u32Src);
4113 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4114 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4115
4116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4118 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4119 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4120
4121 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4122 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4123 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4124
4125 IEM_MC_ADVANCE_RIP_AND_FINISH();
4126 IEM_MC_END();
4127 }
4128 }
4129 else
4130 {
4131 if (IEM_IS_MODRM_REG_MODE(bRm))
4132 {
4133 /* greg32, XMM */
4134 IEM_MC_BEGIN(0, 0);
4135 IEM_MC_LOCAL(int32_t, i32Dst);
4136 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4137 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4138
4139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4140 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4141 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4142
4143 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4144 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4145 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4146
4147 IEM_MC_ADVANCE_RIP_AND_FINISH();
4148 IEM_MC_END();
4149 }
4150 else
4151 {
4152 /* greg32, [mem32] */
4153 IEM_MC_BEGIN(0, 0);
4154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4155 IEM_MC_LOCAL(int32_t, i32Dst);
4156 IEM_MC_LOCAL(uint32_t, u32Src);
4157 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4158 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4159
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4162 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4163 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4164
4165 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4166 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4167 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4168
4169 IEM_MC_ADVANCE_RIP_AND_FINISH();
4170 IEM_MC_END();
4171 }
4172 }
4173}
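/* The extra 't' means truncate: cvttss2si always converts using
   round-toward-zero, whereas cvtss2si (0x0f 0x2d) honors the rounding mode
   selected in MXCSR.RC. The same relationship holds for cvttsd2si/cvtsd2si. */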
4174
4175
4176/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4177FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4178{
4179 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4180
4181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4182 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4183 {
4184 if (IEM_IS_MODRM_REG_MODE(bRm))
4185 {
4186 /* greg64, XMM */
4187 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4188 IEM_MC_LOCAL(int64_t, i64Dst);
4189 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4190 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4191
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4193 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4194 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4195
4196 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4197 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4198 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4199
4200 IEM_MC_ADVANCE_RIP_AND_FINISH();
4201 IEM_MC_END();
4202 }
4203 else
4204 {
4205 /* greg64, [mem64] */
4206 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4208 IEM_MC_LOCAL(int64_t, i64Dst);
4209 IEM_MC_LOCAL(uint64_t, u64Src);
4210 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4211 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4212
4213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4215 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4216 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4217
4218 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4219 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4220 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4221
4222 IEM_MC_ADVANCE_RIP_AND_FINISH();
4223 IEM_MC_END();
4224 }
4225 }
4226 else
4227 {
4228 if (IEM_IS_MODRM_REG_MODE(bRm))
4229 {
4230 /* greg32, XMM */
4231 IEM_MC_BEGIN(0, 0);
4232 IEM_MC_LOCAL(int32_t, i32Dst);
4233 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4234 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4235
4236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4238 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4239
4240 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4241 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4242 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4243
4244 IEM_MC_ADVANCE_RIP_AND_FINISH();
4245 IEM_MC_END();
4246 }
4247 else
4248 {
4249 /* greg32, [mem64] */
4250 IEM_MC_BEGIN(0, 0);
4251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4252 IEM_MC_LOCAL(int32_t, i32Dst);
4253 IEM_MC_LOCAL(uint64_t, u64Src);
4254 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4255 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4256
4257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4259 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4260 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4261
4262 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4263 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4264 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4265
4266 IEM_MC_ADVANCE_RIP_AND_FINISH();
4267 IEM_MC_END();
4268 }
4269 }
4270}
4271
4272
4273/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4274FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4275{
4276 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /*
4281 * Register, register.
4282 */
4283 IEM_MC_BEGIN(0, 0);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4285 IEM_MC_LOCAL(uint64_t, u64Dst);
4286 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4287 IEM_MC_ARG(uint64_t, u64Src, 1);
4288
4289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4290 IEM_MC_PREPARE_FPU_USAGE();
4291 IEM_MC_FPU_TO_MMX_MODE();
4292
4293 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4294
4295 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4296 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4297
4298 IEM_MC_ADVANCE_RIP_AND_FINISH();
4299 IEM_MC_END();
4300 }
4301 else
4302 {
4303 /*
4304 * Register, memory.
4305 */
4306 IEM_MC_BEGIN(0, 0);
4307 IEM_MC_LOCAL(uint64_t, u64Dst);
4308 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4309 IEM_MC_ARG(uint64_t, u64Src, 1);
4310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4311
4312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4314 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4315 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4316
4317 IEM_MC_PREPARE_FPU_USAGE();
4318 IEM_MC_FPU_TO_MMX_MODE();
4319
4320 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4321 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4322
4323 IEM_MC_ADVANCE_RIP_AND_FINISH();
4324 IEM_MC_END();
4325 }
4326}
4327
4328
4329/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4330FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4331{
4332 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4334 if (IEM_IS_MODRM_REG_MODE(bRm))
4335 {
4336 /*
4337 * Register, register.
4338 */
4339 IEM_MC_BEGIN(0, 0);
4340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4341 IEM_MC_LOCAL(uint64_t, u64Dst);
4342 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4343 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4344
4345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4346 IEM_MC_PREPARE_FPU_USAGE();
4347 IEM_MC_FPU_TO_MMX_MODE();
4348
4349 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4350
4351 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4352 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4353
4354 IEM_MC_ADVANCE_RIP_AND_FINISH();
4355 IEM_MC_END();
4356 }
4357 else
4358 {
4359 /*
4360 * Register, memory.
4361 */
4362 IEM_MC_BEGIN(0, 0);
4363 IEM_MC_LOCAL(uint64_t, u64Dst);
4364 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4365 IEM_MC_LOCAL(X86XMMREG, uSrc);
4366 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4368
4369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4371 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4372 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4373
4374 IEM_MC_PREPARE_FPU_USAGE();
4375 IEM_MC_FPU_TO_MMX_MODE();
4376
4377 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4378 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4379
4380 IEM_MC_ADVANCE_RIP_AND_FINISH();
4381 IEM_MC_END();
4382 }
4383}
4384
4385
4386/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4387FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4388{
4389 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4390
4391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4392 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4393 {
4394 if (IEM_IS_MODRM_REG_MODE(bRm))
4395 {
4396 /* greg64, XMM */
4397 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4398 IEM_MC_LOCAL(int64_t, i64Dst);
4399 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4400 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4401
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4403 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4404 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4405
4406 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4407 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4408 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4409
4410 IEM_MC_ADVANCE_RIP_AND_FINISH();
4411 IEM_MC_END();
4412 }
4413 else
4414 {
4415 /* greg64, [mem32] */
4416 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4418 IEM_MC_LOCAL(int64_t, i64Dst);
4419 IEM_MC_LOCAL(uint32_t, u32Src);
4420 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4421 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4422
4423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4425 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4426 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4427
4428 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4429 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4430 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4431
4432 IEM_MC_ADVANCE_RIP_AND_FINISH();
4433 IEM_MC_END();
4434 }
4435 }
4436 else
4437 {
4438 if (IEM_IS_MODRM_REG_MODE(bRm))
4439 {
4440 /* greg32, XMM */
4441 IEM_MC_BEGIN(0, 0);
4442 IEM_MC_LOCAL(int32_t, i32Dst);
4443 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4444 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4445
4446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4448 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4449
4450 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4451 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4452 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4453
4454 IEM_MC_ADVANCE_RIP_AND_FINISH();
4455 IEM_MC_END();
4456 }
4457 else
4458 {
4459 /* greg32, [mem32] */
4460 IEM_MC_BEGIN(0, 0);
4461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4462 IEM_MC_LOCAL(int32_t, i32Dst);
4463 IEM_MC_LOCAL(uint32_t, u32Src);
4464 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4465 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4466
4467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4469 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4470 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4471
4472 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4473 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4474 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4475
4476 IEM_MC_ADVANCE_RIP_AND_FINISH();
4477 IEM_MC_END();
4478 }
4479 }
4480}
4481
4482
4483/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4484FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4485{
4486 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4487
4488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4489 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4490 {
4491 if (IEM_IS_MODRM_REG_MODE(bRm))
4492 {
4493 /* greg64, XMM */
4494 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4495 IEM_MC_LOCAL(int64_t, i64Dst);
4496 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4497 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4498
4499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4501 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4502
4503 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4504 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4505 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4506
4507 IEM_MC_ADVANCE_RIP_AND_FINISH();
4508 IEM_MC_END();
4509 }
4510 else
4511 {
4512 /* greg64, [mem64] */
4513 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4515 IEM_MC_LOCAL(int64_t, i64Dst);
4516 IEM_MC_LOCAL(uint64_t, u64Src);
4517 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4518 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4519
4520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4523 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4524
4525 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4526 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4527 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4528
4529 IEM_MC_ADVANCE_RIP_AND_FINISH();
4530 IEM_MC_END();
4531 }
4532 }
4533 else
4534 {
4535 if (IEM_IS_MODRM_REG_MODE(bRm))
4536 {
4537 /* greg32, XMM */
4538 IEM_MC_BEGIN(0, 0);
4539 IEM_MC_LOCAL(int32_t, i32Dst);
4540 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4541 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4542
4543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4545 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4546
4547 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4548 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4549 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4550
4551 IEM_MC_ADVANCE_RIP_AND_FINISH();
4552 IEM_MC_END();
4553 }
4554 else
4555 {
4556 /* greg32, [mem64] */
4557 IEM_MC_BEGIN(0, 0);
4558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4559 IEM_MC_LOCAL(int32_t, i32Dst);
4560 IEM_MC_LOCAL(uint64_t, u64Src);
4561 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4562 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4563
4564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4566 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4567 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4568
4569 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4570 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4571 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4572
4573 IEM_MC_ADVANCE_RIP_AND_FINISH();
4574 IEM_MC_END();
4575 }
4576 }
4577}
4578
4579
4580/**
4581 * @opcode 0x2e
4582 * @oppfx none
4583 * @opflmodify cf,pf,af,zf,sf,of
4584 * @opflclear af,sf,of
4585 */
4586FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4587{
4588 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4590 if (IEM_IS_MODRM_REG_MODE(bRm))
4591 {
4592 /*
4593 * Register, register.
4594 */
4595 IEM_MC_BEGIN(0, 0);
4596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4597 IEM_MC_LOCAL(uint32_t, fEFlags);
4598 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4599 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4600 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4601 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4602 IEM_MC_PREPARE_SSE_USAGE();
4603 IEM_MC_FETCH_EFLAGS(fEFlags);
4604 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4605 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4606 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4607 IEM_MC_COMMIT_EFLAGS(fEFlags);
4608
4609 IEM_MC_ADVANCE_RIP_AND_FINISH();
4610 IEM_MC_END();
4611 }
4612 else
4613 {
4614 /*
4615 * Register, memory.
4616 */
4617 IEM_MC_BEGIN(0, 0);
4618 IEM_MC_LOCAL(uint32_t, fEFlags);
4619 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4620 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4621 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4623
4624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4626 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4627 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4628
4629 IEM_MC_PREPARE_SSE_USAGE();
4630 IEM_MC_FETCH_EFLAGS(fEFlags);
4631 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4632 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4633 IEM_MC_COMMIT_EFLAGS(fEFlags);
4634
4635 IEM_MC_ADVANCE_RIP_AND_FINISH();
4636 IEM_MC_END();
4637 }
4638}
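/* Result -> EFLAGS mapping shared by the [u]comiss/[u]comisd family (Intel SDM):
     unordered     ZF=1 PF=1 CF=1
     greater than  ZF=0 PF=0 CF=0
     less than     ZF=0 PF=0 CF=1
     equal         ZF=1 PF=0 CF=0
   OF, SF and AF are always cleared. */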
4639
4640
4641/**
4642 * @opcode 0x2e
4643 * @oppfx 0x66
4644 * @opflmodify cf,pf,af,zf,sf,of
4645 * @opflclear af,sf,of
4646 */
4647FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4648{
4649 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4651 if (IEM_IS_MODRM_REG_MODE(bRm))
4652 {
4653 /*
4654 * Register, register.
4655 */
4656 IEM_MC_BEGIN(0, 0);
4657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4658 IEM_MC_LOCAL(uint32_t, fEFlags);
4659 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4660 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4661 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4662 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4663 IEM_MC_PREPARE_SSE_USAGE();
4664 IEM_MC_FETCH_EFLAGS(fEFlags);
4665 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4666 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4667 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4668 IEM_MC_COMMIT_EFLAGS(fEFlags);
4669
4670 IEM_MC_ADVANCE_RIP_AND_FINISH();
4671 IEM_MC_END();
4672 }
4673 else
4674 {
4675 /*
4676 * Register, memory.
4677 */
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_LOCAL(uint32_t, fEFlags);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4681 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4682 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4684
4685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4687 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4688 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4689
4690 IEM_MC_PREPARE_SSE_USAGE();
4691 IEM_MC_FETCH_EFLAGS(fEFlags);
4692 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4693 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4694 IEM_MC_COMMIT_EFLAGS(fEFlags);
4695
4696 IEM_MC_ADVANCE_RIP_AND_FINISH();
4697 IEM_MC_END();
4698 }
4699}
4700
4701
4702/* Opcode 0xf3 0x0f 0x2e - invalid */
4703/* Opcode 0xf2 0x0f 0x2e - invalid */
4704
4705
4706/**
4707 * @opcode 0x2f
4708 * @oppfx none
4709 * @opflmodify cf,pf,af,zf,sf,of
4710 * @opflclear af,sf,of
4711 */
4712FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4713{
4714 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4716 if (IEM_IS_MODRM_REG_MODE(bRm))
4717 {
4718 /*
4719 * Register, register.
4720 */
4721 IEM_MC_BEGIN(0, 0);
4722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4723 IEM_MC_LOCAL(uint32_t, fEFlags);
4724 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4725 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4726 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4728 IEM_MC_PREPARE_SSE_USAGE();
4729 IEM_MC_FETCH_EFLAGS(fEFlags);
4730 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4731 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4732 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4733 IEM_MC_COMMIT_EFLAGS(fEFlags);
4734
4735 IEM_MC_ADVANCE_RIP_AND_FINISH();
4736 IEM_MC_END();
4737 }
4738 else
4739 {
4740 /*
4741 * Register, memory.
4742 */
4743 IEM_MC_BEGIN(0, 0);
4744 IEM_MC_LOCAL(uint32_t, fEFlags);
4745 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4746 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4747 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4749
4750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4752 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4753 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4754
4755 IEM_MC_PREPARE_SSE_USAGE();
4756 IEM_MC_FETCH_EFLAGS(fEFlags);
4757 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4758 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4759 IEM_MC_COMMIT_EFLAGS(fEFlags);
4760
4761 IEM_MC_ADVANCE_RIP_AND_FINISH();
4762 IEM_MC_END();
4763 }
4764}
4765
4766
4767/**
4768 * @opcode 0x2f
4769 * @oppfx 0x66
4770 * @opflmodify cf,pf,af,zf,sf,of
4771 * @opflclear af,sf,of
4772 */
4773FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4774{
4775 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4777 if (IEM_IS_MODRM_REG_MODE(bRm))
4778 {
4779 /*
4780 * Register, register.
4781 */
4782 IEM_MC_BEGIN(0, 0);
4783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4784 IEM_MC_LOCAL(uint32_t, fEFlags);
4785 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4786 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4787 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4788 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4789 IEM_MC_PREPARE_SSE_USAGE();
4790 IEM_MC_FETCH_EFLAGS(fEFlags);
4791 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4792 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4794 IEM_MC_COMMIT_EFLAGS(fEFlags);
4795
4796 IEM_MC_ADVANCE_RIP_AND_FINISH();
4797 IEM_MC_END();
4798 }
4799 else
4800 {
4801 /*
4802 * Register, memory.
4803 */
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_LOCAL(uint32_t, fEFlags);
4806 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4807 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4808 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4810
4811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4814 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4815
4816 IEM_MC_PREPARE_SSE_USAGE();
4817 IEM_MC_FETCH_EFLAGS(fEFlags);
4818 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4819 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4820 IEM_MC_COMMIT_EFLAGS(fEFlags);
4821
4822 IEM_MC_ADVANCE_RIP_AND_FINISH();
4823 IEM_MC_END();
4824 }
4825}
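/* comiss/comisd differ from ucomiss/ucomisd only in NaN handling: the ordered
   compares signal #IA for QNaN as well as SNaN source operands, the unordered
   ones for SNaN only. The EFLAGS results are identical. */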
4826
4827
4828/* Opcode 0xf3 0x0f 0x2f - invalid */
4829/* Opcode 0xf2 0x0f 0x2f - invalid */
4830
4831/** Opcode 0x0f 0x30. */
4832FNIEMOP_DEF(iemOp_wrmsr)
4833{
4834 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4837}
4838
4839
4840/** Opcode 0x0f 0x31. */
4841FNIEMOP_DEF(iemOp_rdtsc)
4842{
4843 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4845 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4846 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4847 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4848 iemCImpl_rdtsc);
4849}
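/* The RT_BIT_64() mask passed to IEM_MC_DEFER_TO_CIMPL_0_RET here presumably
   names the guest registers the C implementation dirties, so the native
   recompiler can flush their shadow copies: rdtsc, rdmsr and rdpmc all return
   their result in EDX:EAX, hence xAX | xDX (wrmsr writes none, hence 0). */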
4850
4851
4852/** Opcode 0x0f 0x32. */
4853FNIEMOP_DEF(iemOp_rdmsr)
4854{
4855 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4857 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4858 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4859 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4860 iemCImpl_rdmsr);
4861}
4862
4863
4864/** Opcode 0x0f 0x33. */
4865FNIEMOP_DEF(iemOp_rdpmc)
4866{
4867 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4869 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4870 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4871 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4872 iemCImpl_rdpmc);
4873}
4874
4875
4876/** Opcode 0x0f 0x34. */
4877FNIEMOP_DEF(iemOp_sysenter)
4878{
4879 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4881 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4882 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4883 iemCImpl_sysenter);
4884}
4885
4886/** Opcode 0x0f 0x35. */
4887FNIEMOP_DEF(iemOp_sysexit)
4888{
4889 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4891 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4892 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4893 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4894}
4895
4896/** Opcode 0x0f 0x37. */
4897FNIEMOP_STUB(iemOp_getsec);
4898
4899
4900/** Opcode 0x0f 0x38. */
4901FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4902{
4903#ifdef IEM_WITH_THREE_0F_38
4904 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4905 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4906#else
4907 IEMOP_BITCH_ABOUT_STUB();
4908 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4909#endif
4910}
4911
4912
4913/** Opcode 0x0f 0x3a. */
4914FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4915{
4916#ifdef IEM_WITH_THREE_0F_3A
4917 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4918 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4919#else
4920 IEMOP_BITCH_ABOUT_STUB();
4921 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4922#endif
4923}
4924
4925
4926/**
4927 * Implements a conditional move.
4928 *
4929 * Wish there were an obvious way to do this that would let us share code and
4930 * reduce bloat.
4931 *
4932 * @param a_Cnd The conditional "microcode" operation.
4933 */
4934#define CMOV_X(a_Cnd) \
4935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4936 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4937 { \
4938 switch (pVCpu->iem.s.enmEffOpSize) \
4939 { \
4940 case IEMMODE_16BIT: \
4941 IEM_MC_BEGIN(0, 0); \
4942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4943 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4944 a_Cnd { \
4945 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4946 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4947 } IEM_MC_ENDIF(); \
4948 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4949 IEM_MC_END(); \
4950 break; \
4951 \
4952 case IEMMODE_32BIT: \
4953 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4955 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4956 a_Cnd { \
4957 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4958 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4959 } IEM_MC_ELSE() { \
4960 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4961 } IEM_MC_ENDIF(); \
4962 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4963 IEM_MC_END(); \
4964 break; \
4965 \
4966 case IEMMODE_64BIT: \
4967 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4969 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4970 a_Cnd { \
4971 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4972 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4973 } IEM_MC_ENDIF(); \
4974 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4975 IEM_MC_END(); \
4976 break; \
4977 \
4978 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4979 } \
4980 } \
4981 else \
4982 { \
4983 switch (pVCpu->iem.s.enmEffOpSize) \
4984 { \
4985 case IEMMODE_16BIT: \
4986 IEM_MC_BEGIN(0, 0); \
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4988 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4991 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4992 a_Cnd { \
4993 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4994 } IEM_MC_ENDIF(); \
4995 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4996 IEM_MC_END(); \
4997 break; \
4998 \
4999 case IEMMODE_32BIT: \
5000 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5002 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5005 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5006 a_Cnd { \
5007 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5008 } IEM_MC_ELSE() { \
5009 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5010 } IEM_MC_ENDIF(); \
5011 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5012 IEM_MC_END(); \
5013 break; \
5014 \
5015 case IEMMODE_64BIT: \
5016 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5018 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5021 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5022 a_Cnd { \
5023 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5024 } IEM_MC_ENDIF(); \
5025 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5026 IEM_MC_END(); \
5027 break; \
5028 \
5029 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5030 } \
5031 } do {} while (0)
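/* Each cmovcc decoder below simply instantiates CMOV_X with the matching
   EFLAGS test, e.g. CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)) for cmovo.
   Note that the 32-bit cases add an IEM_MC_ELSE() clearing the high half of
   the destination, since a 32-bit cmov zero-extends even when the condition
   is false. */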
5032
5033
5034
5035/**
5036 * @opcode 0x40
5037 * @opfltest of
5038 */
5039FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5040{
5041 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5042 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5043}
5044
5045
5046/**
5047 * @opcode 0x41
5048 * @opfltest of
5049 */
5050FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5051{
5052 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5053 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5054}
5055
5056
5057/**
5058 * @opcode 0x42
5059 * @opfltest cf
5060 */
5061FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5062{
5063 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5064 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5065}
5066
5067
5068/**
5069 * @opcode 0x43
5070 * @opfltest cf
5071 */
5072FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5073{
5074 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5075 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5076}
5077
5078
5079/**
5080 * @opcode 0x44
5081 * @opfltest zf
5082 */
5083FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5084{
5085 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5086 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5087}
5088
5089
5090/**
5091 * @opcode 0x45
5092 * @opfltest zf
5093 */
5094FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5095{
5096 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5097 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5098}
5099
5100
5101/**
5102 * @opcode 0x46
5103 * @opfltest cf,zf
5104 */
5105FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5106{
5107 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5108 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5109}
5110
5111
5112/**
5113 * @opcode 0x47
5114 * @opfltest cf,zf
5115 */
5116FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5117{
5118 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5119 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5120}
5121
5122
5123/**
5124 * @opcode 0x48
5125 * @opfltest sf
5126 */
5127FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5128{
5129 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5130 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5131}
5132
5133
5134/**
5135 * @opcode 0x49
5136 * @opfltest sf
5137 */
5138FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5139{
5140 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5141 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5142}
5143
5144
5145/**
5146 * @opcode 0x4a
5147 * @opfltest pf
5148 */
5149FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5150{
5151 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5152 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5153}
5154
5155
5156/**
5157 * @opcode 0x4b
5158 * @opfltest pf
5159 */
5160FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5161{
5162 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5163 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5164}
5165
5166
5167/**
5168 * @opcode 0x4c
5169 * @opfltest sf,of
5170 */
5171FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5172{
5173 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5174 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5175}
5176
5177
5178/**
5179 * @opcode 0x4d
5180 * @opfltest sf,of
5181 */
5182FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5183{
5184 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5185 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5186}
5187
5188
5189/**
5190 * @opcode 0x4e
5191 * @opfltest zf,sf,of
5192 */
5193FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5194{
5195 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5196 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5197}
5198
5199
5200/**
5201 * @opcode 0x4f
5202 * @opfltest zf,sf,of
5203 */
5204FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5205{
5206 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5207 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5208}
5209
5210#undef CMOV_X
5211
5212/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5213FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5214{
5215 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5217 if (IEM_IS_MODRM_REG_MODE(bRm))
5218 {
5219 /*
5220 * Register, register.
5221 */
5222 IEM_MC_BEGIN(0, 0);
5223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5224 IEM_MC_LOCAL(uint8_t, u8Dst);
5225 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5226 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5227 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5228 IEM_MC_PREPARE_SSE_USAGE();
5229 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5230 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5231 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5232 IEM_MC_ADVANCE_RIP_AND_FINISH();
5233 IEM_MC_END();
5234 }
5235 /* No memory operand. */
5236 else
5237 IEMOP_RAISE_INVALID_OPCODE_RET();
5238}
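/* movmskps gathers the sign bit of each of the four packed singles into bits
   3:0 of the destination GPR and zeroes the remaining bits; movmskpd below
   does the same with the two doubles (bits 1:0). */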
5239
5240
5241/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5242FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5243{
5244 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5246 if (IEM_IS_MODRM_REG_MODE(bRm))
5247 {
5248 /*
5249 * Register, register.
5250 */
5251 IEM_MC_BEGIN(0, 0);
5252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5253 IEM_MC_LOCAL(uint8_t, u8Dst);
5254 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5255 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5257 IEM_MC_PREPARE_SSE_USAGE();
5258 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5259 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5260 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5261 IEM_MC_ADVANCE_RIP_AND_FINISH();
5262 IEM_MC_END();
5263 }
5264 /* No memory operand. */
5265 else
5266 IEMOP_RAISE_INVALID_OPCODE_RET();
5268}
5269
5270
5271/* Opcode 0xf3 0x0f 0x50 - invalid */
5272/* Opcode 0xf2 0x0f 0x50 - invalid */
5273
5274
5275/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5276FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5277{
5278 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5279 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5280}
5281
5282
5283/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5284FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5285{
5286 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5287 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5288}
5289
5290
5291/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5292FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5293{
5294 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5295 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5296}
5297
5298
5299/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5300FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5301{
5302 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5303 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5304}
5305
5306
5307/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5308FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5309{
5310 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5311 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5312}
5313
5314
5315/* Opcode 0x66 0x0f 0x52 - invalid */
5316
5317
5318/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5319FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5320{
5321 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5322 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5323}
5324
5325
5326/* Opcode 0xf2 0x0f 0x52 - invalid */
5327
5328
5329/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5330FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5331{
5332 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5333 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5334}
5335
5336
5337/* Opcode 0x66 0x0f 0x53 - invalid */
5338
5339
5340/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5341FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5342{
5343 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5344 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5345}
5346
5347
5348/* Opcode 0xf2 0x0f 0x53 - invalid */
5349
5350
5351/** Opcode 0x0f 0x54 - andps Vps, Wps */
5352FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5353{
5354 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5355 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5356}
5357
5358
5359/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5360FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5361{
5362 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5363 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5364}
5365
5366
5367/* Opcode 0xf3 0x0f 0x54 - invalid */
5368/* Opcode 0xf2 0x0f 0x54 - invalid */
5369
5370
5371/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5372FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5373{
5374 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5375 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5376}
5377
5378
5379/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5380FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5381{
5382 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5383 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5384}
5385
5386
5387/* Opcode 0xf3 0x0f 0x55 - invalid */
5388/* Opcode 0xf2 0x0f 0x55 - invalid */
5389
5390
5391/** Opcode 0x0f 0x56 - orps Vps, Wps */
5392FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5393{
5394 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5395 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5396}
5397
5398
5399/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5400FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5401{
5402 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5403 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5404}
5405
5406
5407/* Opcode 0xf3 0x0f 0x56 - invalid */
5408/* Opcode 0xf2 0x0f 0x56 - invalid */
5409
5410
5411/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5412FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5413{
5414 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5415 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5416}
5417
5418
5419/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5420FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5421{
5422 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5423 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5424}
5425
5426
5427/* Opcode 0xf3 0x0f 0x57 - invalid */
5428/* Opcode 0xf2 0x0f 0x57 - invalid */
5429
5430/** Opcode 0x0f 0x58 - addps Vps, Wps */
5431FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5432{
5433 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5434 SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5435}
5436
5437
5438/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5439FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5440{
5441 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5442 SSE_FP_BODY_FullFull_To_Full(addpd, iemAImpl_addpd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5443}
5444
5445
5446/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5447FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5448{
5449 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5450 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5451}
5452
5453
5454/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5455FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5456{
5457 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5458 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5459}
5460
5461
5462/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5463FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5464{
5465 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5466 SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5467}
5468
5469
5470/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5471FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5472{
5473 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5474 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5475}
5476
5477
5478/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5479FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5480{
5481 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5482 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5483}
5484
5485
5486/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5487FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5488{
5489 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5490 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5491}
5492
5493
5494/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5495FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5496{
5497 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
5498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5499 if (IEM_IS_MODRM_REG_MODE(bRm))
5500 {
5501 /*
5502 * XMM, XMM[63:0].
5503 */
5504 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5507 IEM_MC_PREPARE_SSE_USAGE();
5508
5509 IEM_MC_LOCAL(X86XMMREG, SseRes);
5510 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5511 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
5512 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
5513 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5514 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5515
5516 IEM_MC_ADVANCE_RIP_AND_FINISH();
5517 IEM_MC_END();
5518 }
5519 else
5520 {
5521 /*
5522 * XMM, [mem64].
5523 */
5524 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5528 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5529
5530 IEM_MC_LOCAL(uint64_t, u64Src);
5531 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
5532 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5533
5534 IEM_MC_PREPARE_SSE_USAGE();
5535 IEM_MC_LOCAL(X86XMMREG, SseRes);
5536 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5537 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5538 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5539
5540 IEM_MC_ADVANCE_RIP_AND_FINISH();
5541 IEM_MC_END();
5542 }
5543}
5544
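/*
 * For reference, a minimal model of the conversion done by
 * iemAImpl_cvtps2pd_u128 above (illustrative sketch only, not part of the
 * build): the two packed singles in the low quadword are widened to two
 * doubles.  The real worker additionally honours MXCSR rounding and
 * exception state, which this sketch ignores.
 */
#if 0 /* illustrative only */
static void cvtps2pdRef(double *padDst /* 2 entries */, uint64_t u64Src /* 2 packed singles */)
{
    for (unsigned iLane = 0; iLane < 2; iLane++)
    {
        union { uint32_t u32; float r32; } uLane;
        uLane.u32     = (uint32_t)(u64Src >> (iLane * 32)); /* pick out lane bits */
        padDst[iLane] = (double)uLane.r32;                  /* exact widening, no rounding needed */
    }
}
#endif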
5545
5546/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5547FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5548{
5549 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
5550 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5551 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5552}
5553
5554
5555/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5556FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5557{
5558 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5559 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5560}
5561
5562
5563/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5564FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5565{
5566 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5567 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5568}
5569
5570
5571/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5572FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5573{
5574 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5575 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5576 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5577}
5578
5579
5580/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5581FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5582{
5583 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5584 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5585 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5586}
5587
5588
5589/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5590FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5591{
5592 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5593 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5594 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5595}
5596
5597
5598/* Opcode 0xf2 0x0f 0x5b - invalid */
5599
5600
5601/** Opcode 0x0f 0x5c - subps Vps, Wps */
5602FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5603{
5604 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5605 SSE_FP_BODY_FullFull_To_Full(subps, iemAImpl_subps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5606}
5607
5608
5609/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5610FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5611{
5612 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5613 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5614}
5615
5616
5617/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5618FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5619{
5620 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5621 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5622}
5623
5624
5625/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5626FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5627{
5628 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5629 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5630}
5631
5632
5633/** Opcode 0x0f 0x5d - minps Vps, Wps */
5634FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5635{
5636 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5642FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5643{
5644 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5646}
5647
5648
5649/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5650FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5651{
5652 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5653 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5654}
5655
5656
5657/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5658FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5659{
5660 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5661 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5662}
5663
5664
5665/** Opcode 0x0f 0x5e - divps Vps, Wps */
5666FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5667{
5668 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5669 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5670}
5671
5672
5673/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5674FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5675{
5676 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5677 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5678}
5679
5680
5681/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5682FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5683{
5684 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5685 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5686}
5687
5688
5689/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5690FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5691{
5692 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5693 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5694}
5695
5696
5697/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5698FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5699{
5700 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5701 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5702}
5703
5704
5705/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5706FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5707{
5708 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5709 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5710}
5711
5712
5713/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5714FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5715{
5716 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5717 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5718}
5719
5720
5721/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5722FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5723{
5724 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5725 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5726}
5727
5728
5729/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5730FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5731{
5732 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5733 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5734}
5735
5736
5737/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5738FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5739{
5740 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5741 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5742}
5743
5744
5745/* Opcode 0xf3 0x0f 0x60 - invalid */
5746
5747
5748/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5749FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5750{
5751 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5752 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5753 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5754}
5755
5756
5757/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5758FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5759{
5760 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5761 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5762}
5763
5764
5765/* Opcode 0xf3 0x0f 0x61 - invalid */
5766
5767
5768/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5769FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5770{
5771 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5772 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5773}
5774
5775
5776/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5777FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5778{
5779 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5780 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5781}
5782
5783
5784/* Opcode 0xf3 0x0f 0x62 - invalid */
5785
5786
5787
5788/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5789FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5790{
5791 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5792 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5793}
5794
5795
5796/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5797FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5798{
5799 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5800 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5801}
5802
5803
5804/* Opcode 0xf3 0x0f 0x63 - invalid */
5805
5806
5807/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5808FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5809{
5810 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5811 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5812}
5813
5814
5815/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5816FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5817{
5818 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5819 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5820}
5821
5822
5823/* Opcode 0xf3 0x0f 0x64 - invalid */
5824
5825
5826/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5827FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5828{
5829 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5830 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5831}
5832
5833
5834/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5835FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5836{
5837 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5838 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5839}
5840
5841
5842/* Opcode 0xf3 0x0f 0x65 - invalid */
5843
5844
5845/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5846FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5847{
5848 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5849 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5850}
5851
5852
5853/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5854FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5855{
5856 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5857 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5858}
5859
5860
5861/* Opcode 0xf3 0x0f 0x66 - invalid */
5862
5863
5864/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5865FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5866{
5867 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5868 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5869}
5870
5871
5872/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5873FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5874{
5875 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5876 SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5877}
5878
5879
5880/* Opcode 0xf3 0x0f 0x67 - invalid */
5881
5882
5883/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5884 * @note Intel and AMD both use Qd for the second parameter; however, they
5885 * both list it as an mmX/mem64 operand and Intel describes it as being
5886 * loaded as a qword, so it should be Qq, shouldn't it? */
5887FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5888{
5889 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5890 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5891}
5892
5893
5894/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5895FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5896{
5897 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5898 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5899}
5900
5901
5902/* Opcode 0xf3 0x0f 0x68 - invalid */
5903
5904
5905/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5906 * @note Intel and AMD both use Qd for the second parameter; however, they
5907 * both list it as an mmX/mem64 operand and Intel describes it as being
5908 * loaded as a qword, so it should be Qq, shouldn't it? */
5909FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5910{
5911 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5912 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5913}
5914
5915
5916/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5917FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5918{
5919 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5920 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5922}
5923
5924
5925/* Opcode 0xf3 0x0f 0x69 - invalid */
5926
5927
5928/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5929 * @note Intel and AMD both use Qd for the second parameter; however, they
5930 * both list it as an mmX/mem64 operand and Intel describes it as being
5931 * loaded as a qword, so it should be Qq, shouldn't it? */
5932FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5933{
5934 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5936}
5937
5938
5939/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5940FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5941{
5942 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5943 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5944}
5945
5946
5947/* Opcode 0xf3 0x0f 0x6a - invalid */
5948
5949
5950/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5951FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5952{
5953 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5954 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5955}
5956
5957
5958/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5959FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5960{
5961 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5962 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5963}
5964
5965
5966/* Opcode 0xf3 0x0f 0x6b - invalid */
5967
5968
5969/* Opcode 0x0f 0x6c - invalid */
5970
5971
5972/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5973FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5974{
5975 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5976 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5977}
5978
5979
5980/* Opcode 0xf3 0x0f 0x6c - invalid */
5981/* Opcode 0xf2 0x0f 0x6c - invalid */
5982
5983
5984/* Opcode 0x0f 0x6d - invalid */
5985
5986
5987/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5988FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5989{
5990 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5991 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5992}
5993
5994
5995/* Opcode 0xf3 0x0f 0x6d - invalid */
5996
5997
5998FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5999{
6000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6001 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6002 {
6003 /**
6004 * @opcode 0x6e
6005 * @opcodesub rex.w=1
6006 * @oppfx none
6007 * @opcpuid mmx
6008 * @opgroup og_mmx_datamove
6009 * @opxcpttype 5
6010 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6011 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6012 */
6013 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6014 if (IEM_IS_MODRM_REG_MODE(bRm))
6015 {
6016 /* MMX, greg64 */
6017 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6019 IEM_MC_LOCAL(uint64_t, u64Tmp);
6020
6021 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6022 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6023 IEM_MC_FPU_TO_MMX_MODE();
6024
6025 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6026 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6027
6028 IEM_MC_ADVANCE_RIP_AND_FINISH();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* MMX, [mem64] */
6034 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6036 IEM_MC_LOCAL(uint64_t, u64Tmp);
6037
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6040 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6041 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6042
6043 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6044 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6045 IEM_MC_FPU_TO_MMX_MODE();
6046
6047 IEM_MC_ADVANCE_RIP_AND_FINISH();
6048 IEM_MC_END();
6049 }
6050 }
6051 else
6052 {
6053 /**
6054 * @opdone
6055 * @opcode 0x6e
6056 * @opcodesub rex.w=0
6057 * @oppfx none
6058 * @opcpuid mmx
6059 * @opgroup og_mmx_datamove
6060 * @opxcpttype 5
6061 * @opfunction iemOp_movd_q_Pd_Ey
6062 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6063 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6064 */
6065 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6066 if (IEM_IS_MODRM_REG_MODE(bRm))
6067 {
6068 /* MMX, greg32 */
6069 IEM_MC_BEGIN(0, 0);
6070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6071 IEM_MC_LOCAL(uint32_t, u32Tmp);
6072
6073 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6074 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6075 IEM_MC_FPU_TO_MMX_MODE();
6076
6077 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6078 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6079
6080 IEM_MC_ADVANCE_RIP_AND_FINISH();
6081 IEM_MC_END();
6082 }
6083 else
6084 {
6085 /* MMX, [mem32] */
6086 IEM_MC_BEGIN(0, 0);
6087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6088 IEM_MC_LOCAL(uint32_t, u32Tmp);
6089
6090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6092 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6093 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6094
6095 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6096 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6097 IEM_MC_FPU_TO_MMX_MODE();
6098
6099 IEM_MC_ADVANCE_RIP_AND_FINISH();
6100 IEM_MC_END();
6101 }
6102 }
6103}
6104
6105FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6106{
6107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6108 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6109 {
6110 /**
6111 * @opcode 0x6e
6112 * @opcodesub rex.w=1
6113 * @oppfx 0x66
6114 * @opcpuid sse2
6115 * @opgroup og_sse2_simdint_datamove
6116 * @opxcpttype 5
6117 * @optest 64-bit / op1=1 op2=2 -> op1=2
6118 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6119 */
6120 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6121 if (IEM_IS_MODRM_REG_MODE(bRm))
6122 {
6123 /* XMM, greg64 */
6124 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6126 IEM_MC_LOCAL(uint64_t, u64Tmp);
6127
6128 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6130
6131 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6132 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6133
6134 IEM_MC_ADVANCE_RIP_AND_FINISH();
6135 IEM_MC_END();
6136 }
6137 else
6138 {
6139 /* XMM, [mem64] */
6140 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6142 IEM_MC_LOCAL(uint64_t, u64Tmp);
6143
6144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6146 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6148
6149 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6150 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6151
6152 IEM_MC_ADVANCE_RIP_AND_FINISH();
6153 IEM_MC_END();
6154 }
6155 }
6156 else
6157 {
6158 /**
6159 * @opdone
6160 * @opcode 0x6e
6161 * @opcodesub rex.w=0
6162 * @oppfx 0x66
6163 * @opcpuid sse2
6164 * @opgroup og_sse2_simdint_datamove
6165 * @opxcpttype 5
6166 * @opfunction iemOp_movd_q_Vy_Ey
6167 * @optest op1=1 op2=2 -> op1=2
6168 * @optest op1=0 op2=-42 -> op1=-42
6169 */
6170 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6171 if (IEM_IS_MODRM_REG_MODE(bRm))
6172 {
6173 /* XMM, greg32 */
6174 IEM_MC_BEGIN(0, 0);
6175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6176 IEM_MC_LOCAL(uint32_t, u32Tmp);
6177
6178 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6179 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6180
6181 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6182 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6183
6184 IEM_MC_ADVANCE_RIP_AND_FINISH();
6185 IEM_MC_END();
6186 }
6187 else
6188 {
6189 /* XMM, [mem32] */
6190 IEM_MC_BEGIN(0, 0);
6191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6192 IEM_MC_LOCAL(uint32_t, u32Tmp);
6193
6194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6196 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6197 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6198
6199 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6200 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6201
6202 IEM_MC_ADVANCE_RIP_AND_FINISH();
6203 IEM_MC_END();
6204 }
6205 }
6206}
6207
6208/* Opcode 0xf3 0x0f 0x6e - invalid */
6209
6210
6211/**
6212 * @opcode 0x6f
6213 * @oppfx none
6214 * @opcpuid mmx
6215 * @opgroup og_mmx_datamove
6216 * @opxcpttype 5
6217 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6218 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6219 */
6220FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6221{
6222 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6224 if (IEM_IS_MODRM_REG_MODE(bRm))
6225 {
6226 /*
6227 * Register, register.
6228 */
6229 IEM_MC_BEGIN(0, 0);
6230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6231 IEM_MC_LOCAL(uint64_t, u64Tmp);
6232
6233 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6234 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6235 IEM_MC_FPU_TO_MMX_MODE();
6236
6237 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6238 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6239
6240 IEM_MC_ADVANCE_RIP_AND_FINISH();
6241 IEM_MC_END();
6242 }
6243 else
6244 {
6245 /*
6246 * Register, memory.
6247 */
6248 IEM_MC_BEGIN(0, 0);
6249 IEM_MC_LOCAL(uint64_t, u64Tmp);
6250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6251
6252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6254 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6255 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6256
6257 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6258 IEM_MC_FPU_TO_MMX_MODE();
6259
6260 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6261
6262 IEM_MC_ADVANCE_RIP_AND_FINISH();
6263 IEM_MC_END();
6264 }
6265}
6266
6267/**
6268 * @opcode 0x6f
6269 * @oppfx 0x66
6270 * @opcpuid sse2
6271 * @opgroup og_sse2_simdint_datamove
6272 * @opxcpttype 1
6273 * @optest op1=1 op2=2 -> op1=2
6274 * @optest op1=0 op2=-42 -> op1=-42
6275 */
6276FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6277{
6278 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6280 if (IEM_IS_MODRM_REG_MODE(bRm))
6281 {
6282 /*
6283 * Register, register.
6284 */
6285 IEM_MC_BEGIN(0, 0);
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6287
6288 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6289 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6290
6291 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6292 IEM_GET_MODRM_RM(pVCpu, bRm));
6293 IEM_MC_ADVANCE_RIP_AND_FINISH();
6294 IEM_MC_END();
6295 }
6296 else
6297 {
6298 /*
6299 * Register, memory.
6300 */
6301 IEM_MC_BEGIN(0, 0);
6302 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6304
6305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6307 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6308 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6309
6310 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6311 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6312
6313 IEM_MC_ADVANCE_RIP_AND_FINISH();
6314 IEM_MC_END();
6315 }
6316}
6317
6318/**
6319 * @opcode 0x6f
6320 * @oppfx 0xf3
6321 * @opcpuid sse2
6322 * @opgroup og_sse2_simdint_datamove
6323 * @opxcpttype 4UA
6324 * @optest op1=1 op2=2 -> op1=2
6325 * @optest op1=0 op2=-42 -> op1=-42
6326 */
6327FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6328{
6329 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6331 if (IEM_IS_MODRM_REG_MODE(bRm))
6332 {
6333 /*
6334 * Register, register.
6335 */
6336 IEM_MC_BEGIN(0, 0);
6337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6340 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6341 IEM_GET_MODRM_RM(pVCpu, bRm));
6342 IEM_MC_ADVANCE_RIP_AND_FINISH();
6343 IEM_MC_END();
6344 }
6345 else
6346 {
6347 /*
6348 * Register, memory.
6349 */
6350 IEM_MC_BEGIN(0, 0);
6351 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6353
6354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6356 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6357 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6358 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6359 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6360
6361 IEM_MC_ADVANCE_RIP_AND_FINISH();
6362 IEM_MC_END();
6363 }
6364}
6365
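/*
 * Note on the two fetch variants above: movdqa uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE and so raises #GP(0) for a misaligned
 * 16-byte memory operand, while movdqu uses IEM_MC_FETCH_MEM_U128_NO_AC
 * and accepts any alignment.  (Descriptive note only.)
 */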
6366
6367/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6368FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6369{
6370 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6372 if (IEM_IS_MODRM_REG_MODE(bRm))
6373 {
6374 /*
6375 * Register, register.
6376 */
6377 IEM_MC_BEGIN(0, 0);
6378 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6380 IEM_MC_ARG(uint64_t *, pDst, 0);
6381 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6382 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6383 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6384 IEM_MC_PREPARE_FPU_USAGE();
6385 IEM_MC_FPU_TO_MMX_MODE();
6386
6387 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6388 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6389 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6390 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6391
6392 IEM_MC_ADVANCE_RIP_AND_FINISH();
6393 IEM_MC_END();
6394 }
6395 else
6396 {
6397 /*
6398 * Register, memory.
6399 */
6400 IEM_MC_BEGIN(0, 0);
6401 IEM_MC_ARG(uint64_t *, pDst, 0);
6402 IEM_MC_LOCAL(uint64_t, uSrc);
6403 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6405
6406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6407 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6408 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6410 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6411 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6412
6413 IEM_MC_PREPARE_FPU_USAGE();
6414 IEM_MC_FPU_TO_MMX_MODE();
6415
6416 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6417 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6418 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6419
6420 IEM_MC_ADVANCE_RIP_AND_FINISH();
6421 IEM_MC_END();
6422 }
6423}
6424
6425
6426/**
6427 * Common worker for SSE2 instructions on the forms:
6428 * pshufd xmm1, xmm2/mem128, imm8
6429 * pshufhw xmm1, xmm2/mem128, imm8
6430 * pshuflw xmm1, xmm2/mem128, imm8
6431 *
6432 * Proper alignment of the 128-bit operand is enforced.
6433 * Exceptions type 4. SSE2 cpuid checks.
6434 */
6435FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6436{
6437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6438 if (IEM_IS_MODRM_REG_MODE(bRm))
6439 {
6440 /*
6441 * Register, register.
6442 */
6443 IEM_MC_BEGIN(0, 0);
6444 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6446 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6447 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6448 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6449 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6450 IEM_MC_PREPARE_SSE_USAGE();
6451 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6452 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6453 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6454 IEM_MC_ADVANCE_RIP_AND_FINISH();
6455 IEM_MC_END();
6456 }
6457 else
6458 {
6459 /*
6460 * Register, memory.
6461 */
6462 IEM_MC_BEGIN(0, 0);
6463 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6464 IEM_MC_LOCAL(RTUINT128U, uSrc);
6465 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6467
6468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6469 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6470 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6472 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6473
6474 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6475 IEM_MC_PREPARE_SSE_USAGE();
6476 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6477 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6478
6479 IEM_MC_ADVANCE_RIP_AND_FINISH();
6480 IEM_MC_END();
6481 }
6482}
6483
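/*
 * For reference, a minimal model of what iemAImpl_pshufd_u128 (dispatched
 * through the worker above) computes, assuming the Intel SDM semantics;
 * illustrative sketch only, not part of the build: each destination dword
 * is selected from the source by a 2-bit field of the immediate.
 */
#if 0 /* illustrative only */
static void pshufdRef(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first so puDst may alias puSrc */
    for (unsigned iLane = 0; iLane < 4; iLane++)
        puDst->au32[iLane] = uSrc.au32[(bImm >> (iLane * 2)) & 3];
}
#endif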
6484
6485/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6486FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6487{
6488 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6489 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6490}
6491
6492
6493/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6494FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6495{
6496 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6497 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6498}
6499
6500
6501/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6502FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6503{
6504 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6505 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6506}
6507
6508
6509/**
6510 * Common worker for MMX instructions of the form:
6511 * psrlw mm, imm8
6512 * psraw mm, imm8
6513 * psllw mm, imm8
6514 * psrld mm, imm8
6515 * psrad mm, imm8
6516 * pslld mm, imm8
6517 * psrlq mm, imm8
6518 * psllq mm, imm8
6519 *
6520 */
6521FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6522{
6523 if (IEM_IS_MODRM_REG_MODE(bRm))
6524 {
6525 /*
6526 * Register, immediate.
6527 */
6528 IEM_MC_BEGIN(0, 0);
6529 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6531 IEM_MC_ARG(uint64_t *, pDst, 0);
6532 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6534 IEM_MC_PREPARE_FPU_USAGE();
6535 IEM_MC_FPU_TO_MMX_MODE();
6536
6537 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6538 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6539 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6540
6541 IEM_MC_ADVANCE_RIP_AND_FINISH();
6542 IEM_MC_END();
6543 }
6544 else
6545 {
6546 /*
6547 * Register, memory not supported.
6548 */
6549 /// @todo Caller already enforced register mode?!
6550 AssertFailedReturn(VINF_SUCCESS);
6551 }
6552}
6553
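/*
 * Reference model of one of the workers dispatched above, psrlw mm, imm8
 * (assumed per the Intel SDM; illustrative sketch only, not part of the
 * build): each packed word is shifted right logically, and counts above 15
 * clear all lanes.
 */
#if 0 /* illustrative only */
static void psrlwImmRef(uint64_t *puDst, uint8_t bShift)
{
    if (bShift > 15)
        *puDst = 0; /* an out-of-range count zeroes every lane */
    else
    {
        uint64_t const uSrc    = *puDst;
        uint64_t       uResult = 0;
        for (unsigned iLane = 0; iLane < 4; iLane++)
        {
            uint16_t const uWord = (uint16_t)(uSrc >> (iLane * 16));
            uResult |= (uint64_t)(uint16_t)(uWord >> bShift) << (iLane * 16);
        }
        *puDst = uResult;
    }
}
#endif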
6554
6555#if 0 /*unused*/
6556/**
6557 * Common worker for SSE2 instructions of the form:
6558 * psrlw xmm, imm8
6559 * psraw xmm, imm8
6560 * psllw xmm, imm8
6561 * psrld xmm, imm8
6562 * psrad xmm, imm8
6563 * pslld xmm, imm8
6564 * psrlq xmm, imm8
6565 * psllq xmm, imm8
6566 *
6567 */
6568FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6569{
6570 if (IEM_IS_MODRM_REG_MODE(bRm))
6571 {
6572 /*
6573 * Register, immediate.
6574 */
6575 IEM_MC_BEGIN(0, 0);
6576 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6578 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6579 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6580 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6581 IEM_MC_PREPARE_SSE_USAGE();
6582 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6583 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6584 IEM_MC_ADVANCE_RIP_AND_FINISH();
6585 IEM_MC_END();
6586 }
6587 else
6588 {
6589 /*
6590 * Register, memory not supported.
6591 */
6592 /// @todo Caller already enforced register mode?!
6593 AssertFailedReturn(VINF_SUCCESS);
6594 }
6595}
6596#endif
6597
6598
6599/**
6600 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6601 */
6602#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6603 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6604 { \
6605 /* \
6606 * Register, immediate. \
6607 */ \
6608 IEM_MC_BEGIN(0, 0); \
6609 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6611 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6612 IEM_MC_PREPARE_SSE_USAGE(); \
6613 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6614 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6615 } IEM_MC_NATIVE_ELSE() { \
6616 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6617 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6618 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6619 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6620 } IEM_MC_NATIVE_ENDIF(); \
6621 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6622 IEM_MC_END(); \
6623 } \
6624 else \
6625 { \
6626 /* \
6627 * Register, memory. \
6628 */ \
6629 AssertFailedReturn(VINF_SUCCESS); \
6630 } (void)0
6631
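/* Note: the trailing "(void)0" above makes each use of the macro require
   the caller's terminating semicolon, so invocations read like ordinary
   statements. */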
6632
6633/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6634FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6635{
6636// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6637 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6638}
6639
6640
6641/** Opcode 0x66 0x0f 0x71 11/2. */
6642FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6643{
6644// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6645 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6646}
6647
6648
6649/** Opcode 0x0f 0x71 11/4. */
6650FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6651{
6652// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6653 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6654}
6655
6656
6657/** Opcode 0x66 0x0f 0x71 11/4. */
6658FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6659{
6660// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6661 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6662}
6663
6664
6665/** Opcode 0x0f 0x71 11/6. */
6666FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6667{
6668// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6669 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6670}
6671
6672
6673/** Opcode 0x66 0x0f 0x71 11/6. */
6674FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6675{
6676// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6677 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6678}
6679
6680
6681/**
6682 * Group 12 jump table for register variant.
6683 */
6684IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6685{
6686 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6687 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6688 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6689 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6691 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6692 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6693 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6694};
6695AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6696
6697
6698/** Opcode 0x0f 0x71. */
6699FNIEMOP_DEF(iemOp_Grp12)
6700{
6701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6702 if (IEM_IS_MODRM_REG_MODE(bRm))
6703 /* register, register */
6704 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6705 + pVCpu->iem.s.idxPrefix], bRm);
6706 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6707}
6708
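/*
 * Worked example of the dispatch above, assuming the usual IEM prefix
 * column order (none, 0x66, 0xf3, 0xf2) for pVCpu->iem.s.idxPrefix: the
 * encoding "0x66 0x0f 0x71 /2" (psrlw Ux, Ib) has reg=2 and idxPrefix=1,
 * so it resolves to g_apfnGroup12RegReg[2 * 4 + 1], i.e.
 * iemOp_Grp12_psrlw_Ux_Ib.
 */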
6709
6710/** Opcode 0x0f 0x72 11/2. */
6711FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6712{
6713// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6714 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6715}
6716
6717
6718/** Opcode 0x66 0x0f 0x72 11/2. */
6719FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6720{
6721// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6722 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6723}
6724
6725
6726/** Opcode 0x0f 0x72 11/4. */
6727FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6728{
6729// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6730 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6731}
6732
6733
6734/** Opcode 0x66 0x0f 0x72 11/4. */
6735FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6736{
6737// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6738 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6739}
6740
6741
6742/** Opcode 0x0f 0x72 11/6. */
6743FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6744{
6745// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6746 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6747}
6748
6749/** Opcode 0x66 0x0f 0x72 11/6. */
6750FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6751{
6752// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6753 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6754}
6755
6756
6757/**
6758 * Group 13 jump table for register variant.
6759 */
6760IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6761{
6762 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6763 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6764 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6765 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6766 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6767 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6768 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6769 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6770};
6771AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6772
6773/** Opcode 0x0f 0x72. */
6774FNIEMOP_DEF(iemOp_Grp13)
6775{
6776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6777 if (IEM_IS_MODRM_REG_MODE(bRm))
6778 /* register, register */
6779 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6780 + pVCpu->iem.s.idxPrefix], bRm);
6781 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6782}
6783
6784
6785/** Opcode 0x0f 0x73 11/2. */
6786FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6787{
6788// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6789 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6790}
6791
6792
6793/** Opcode 0x66 0x0f 0x73 11/2. */
6794FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6795{
6796// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6797 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6798}
6799
6800
6801/** Opcode 0x66 0x0f 0x73 11/3. */
6802FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6803{
6804// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6805 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6806}
6807
6808
6809/** Opcode 0x0f 0x73 11/6. */
6810FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6811{
6812// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6813 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6814}
6815
6816
6817/** Opcode 0x66 0x0f 0x73 11/6. */
6818FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6819{
6820// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6821 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6822}
6823
6824
6825/** Opcode 0x66 0x0f 0x73 11/7. */
6826FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6827{
6828// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6829 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6830}
6831
6832/**
6833 * Group 14 jump table for register variant.
6834 */
6835IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6836{
6837 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6838 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6839 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6840 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6843 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6844 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6845};
6846AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6847
6848
6849/** Opcode 0x0f 0x73. */
6850FNIEMOP_DEF(iemOp_Grp14)
6851{
6852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6853 if (IEM_IS_MODRM_REG_MODE(bRm))
6854 /* register, register */
6855 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6856 + pVCpu->iem.s.idxPrefix], bRm);
6857 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6858}
6859
6860
6861/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6862FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6863{
6864 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6865 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6866}
6867
6868
6869/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6870FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6871{
6872 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6873 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6874}
6875
6876
6877/* Opcode 0xf3 0x0f 0x74 - invalid */
6878/* Opcode 0xf2 0x0f 0x74 - invalid */
6879
6880
6881/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6882FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6883{
6884 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6885 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6886}
6887
6888
6889/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6890FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6891{
6892 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6893 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6894}
6895
6896
6897/* Opcode 0xf3 0x0f 0x75 - invalid */
6898/* Opcode 0xf2 0x0f 0x75 - invalid */
6899
6900
6901/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6902FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6903{
6904 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6905 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6906}
6907
6908
6909/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6910FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6911{
6912 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6913 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6914}
6915
6916
6917/* Opcode 0xf3 0x0f 0x76 - invalid */
6918/* Opcode 0xf2 0x0f 0x76 - invalid */
6919
6920
6921/** Opcode 0x0f 0x77 - emms (VEX has vzeroall and vzeroupper here) */
6922FNIEMOP_DEF(iemOp_emms)
6923{
6924 IEMOP_MNEMONIC(emms, "emms");
6925 IEM_MC_BEGIN(0, 0);
6926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6927 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6928 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6929 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6930 IEM_MC_FPU_FROM_MMX_MODE();
6931 IEM_MC_ADVANCE_RIP_AND_FINISH();
6932 IEM_MC_END();
6933}
6934
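/* Note: IEM_MC_FPU_FROM_MMX_MODE above models the architectural effect of
   emms, which tags all eight x87 registers as empty again so regular FPU
   code can follow MMX code.  (Descriptive note only.) */
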
6935/* Opcode 0x66 0x0f 0x77 - invalid */
6936/* Opcode 0xf3 0x0f 0x77 - invalid */
6937/* Opcode 0xf2 0x0f 0x77 - invalid */
6938
6939/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6940#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6941FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6942{
6943 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6944 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6945 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6946 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6947
6948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6949 if (IEM_IS_MODRM_REG_MODE(bRm))
6950 {
6951 /*
6952 * Register, register.
6953 */
6954 if (enmEffOpSize == IEMMODE_64BIT)
6955 {
6956 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6957 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6958 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6959 IEM_MC_ARG(uint64_t, u64Enc, 1);
6960 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6961 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6962 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6963 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6964 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6965 IEM_MC_END();
6966 }
6967 else
6968 {
6969 IEM_MC_BEGIN(0, 0);
6970 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6971 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6972 IEM_MC_ARG(uint32_t, u32Enc, 1);
6973 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6974 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6975 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6976 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6977 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6978 IEM_MC_END();
6979 }
6980 }
6981 else
6982 {
6983 /*
6984 * Memory, register.
6985 */
6986 if (enmEffOpSize == IEMMODE_64BIT)
6987 {
6988 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6989 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6991 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6992 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6993 IEM_MC_ARG(uint64_t, u64Enc, 2);
6994 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6995 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6996 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6997 IEM_MC_END();
6998 }
6999 else
7000 {
7001 IEM_MC_BEGIN(0, 0);
7002 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7004 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7005 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7006 IEM_MC_ARG(uint32_t, u32Enc, 2);
7007 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7008 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7009 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7010 IEM_MC_END();
7011 }
7012 }
7013}
7014#else
7015FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7016#endif
7017
7018/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7019FNIEMOP_STUB(iemOp_AmdGrp17);
7020/* Opcode 0xf3 0x0f 0x78 - invalid */
7021/* Opcode 0xf2 0x0f 0x78 - invalid */
7022
7023/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7024#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7025FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7026{
7027 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7028 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7029 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
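    /* Note: for VMWRITE the register operand (Gy) again supplies the VMCS
       field encoding, while the value to store comes from the r/m operand
       (Ey). */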
7030 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7031
7032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7033 if (IEM_IS_MODRM_REG_MODE(bRm))
7034 {
7035 /*
7036 * Register, register.
7037 */
7038 if (enmEffOpSize == IEMMODE_64BIT)
7039 {
7040 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7041 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7042 IEM_MC_ARG(uint64_t, u64Val, 0);
7043 IEM_MC_ARG(uint64_t, u64Enc, 1);
7044 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7045 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7046 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7047 IEM_MC_END();
7048 }
7049 else
7050 {
7051 IEM_MC_BEGIN(0, 0);
7052 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7053 IEM_MC_ARG(uint32_t, u32Val, 0);
7054 IEM_MC_ARG(uint32_t, u32Enc, 1);
7055 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7056 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7057 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7058 IEM_MC_END();
7059 }
7060 }
7061 else
7062 {
7063 /*
7064 * Register, memory.
7065 */
7066 if (enmEffOpSize == IEMMODE_64BIT)
7067 {
7068 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7069 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7071 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7072 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7073 IEM_MC_ARG(uint64_t, u64Enc, 2);
7074 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7075 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7076 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7077 IEM_MC_END();
7078 }
7079 else
7080 {
7081 IEM_MC_BEGIN(0, 0);
7082 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7084 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7085 IEM_MC_ARG(uint32_t, u32Enc, 2);
7086 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7087 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7088 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7089 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7090 IEM_MC_END();
7091 }
7092 }
7093}
7094#else
7095FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7096#endif
7097/* Opcode 0x66 0x0f 0x79 - invalid */
7098/* Opcode 0xf3 0x0f 0x79 - invalid */
7099/* Opcode 0xf2 0x0f 0x79 - invalid */
7100
7101/* Opcode 0x0f 0x7a - invalid */
7102/* Opcode 0x66 0x0f 0x7a - invalid */
7103/* Opcode 0xf3 0x0f 0x7a - invalid */
7104/* Opcode 0xf2 0x0f 0x7a - invalid */
7105
7106/* Opcode 0x0f 0x7b - invalid */
7107/* Opcode 0x66 0x0f 0x7b - invalid */
7108/* Opcode 0xf3 0x0f 0x7b - invalid */
7109/* Opcode 0xf2 0x0f 0x7b - invalid */
7110
7111/* Opcode 0x0f 0x7c - invalid */
7112
7113
7114/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7115FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7116{
7117 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7118 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7119}
7120
7121
7122/* Opcode 0xf3 0x0f 0x7c - invalid */
7123
7124
7125/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7126FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7127{
7128 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7129 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7130}
7131
7132
7133/* Opcode 0x0f 0x7d - invalid */
7134
7135
7136/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7137FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7138{
7139 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7140 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7141}
7142
7143
7144/* Opcode 0xf3 0x0f 0x7d - invalid */
7145
7146
7147/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7148FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7149{
7150 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7151 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7152}
7153
7154
7155/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7156FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7157{
7158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
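    /* REX.W selects between the 64-bit 'movq Eq, Pq' and the 32-bit
       'movd Ed, Pd' forms; both move in the store direction, i.e. the MMX
       register is the source and the r/m operand the destination. */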
7159 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7160 {
7161 /**
7162 * @opcode 0x7e
7163 * @opcodesub rex.w=1
7164 * @oppfx none
7165 * @opcpuid mmx
7166 * @opgroup og_mmx_datamove
7167 * @opxcpttype 5
7168 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7169 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7170 */
7171 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7172 if (IEM_IS_MODRM_REG_MODE(bRm))
7173 {
7174 /* greg64, MMX */
7175 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7177 IEM_MC_LOCAL(uint64_t, u64Tmp);
7178
7179 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7180 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7181 IEM_MC_FPU_TO_MMX_MODE();
7182
7183 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7184 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7185
7186 IEM_MC_ADVANCE_RIP_AND_FINISH();
7187 IEM_MC_END();
7188 }
7189 else
7190 {
7191 /* [mem64], MMX */
7192 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7194 IEM_MC_LOCAL(uint64_t, u64Tmp);
7195
7196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7198 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7199 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7200
7201 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7202 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7203 IEM_MC_FPU_TO_MMX_MODE();
7204
7205 IEM_MC_ADVANCE_RIP_AND_FINISH();
7206 IEM_MC_END();
7207 }
7208 }
7209 else
7210 {
7211 /**
7212 * @opdone
7213 * @opcode 0x7e
7214 * @opcodesub rex.w=0
7215 * @oppfx none
7216 * @opcpuid mmx
7217 * @opgroup og_mmx_datamove
7218 * @opxcpttype 5
7219 * @opfunction iemOp_movd_q_Ey_Pd
7220 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7221 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7222 */
7223 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7224 if (IEM_IS_MODRM_REG_MODE(bRm))
7225 {
7226 /* greg32, MMX */
7227 IEM_MC_BEGIN(0, 0);
7228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7229 IEM_MC_LOCAL(uint32_t, u32Tmp);
7230
7231 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7232 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7233 IEM_MC_FPU_TO_MMX_MODE();
7234
7235 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7236 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7237
7238 IEM_MC_ADVANCE_RIP_AND_FINISH();
7239 IEM_MC_END();
7240 }
7241 else
7242 {
7243 /* [mem32], MMX */
7244 IEM_MC_BEGIN(0, 0);
7245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7246 IEM_MC_LOCAL(uint32_t, u32Tmp);
7247
7248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7250 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7251 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7252
7253 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7254 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7255 IEM_MC_FPU_TO_MMX_MODE();
7256
7257 IEM_MC_ADVANCE_RIP_AND_FINISH();
7258 IEM_MC_END();
7259 }
7260 }
7261}
7262
7263
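/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */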
7264FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7265{
7266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7267 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7268 {
7269 /**
7270 * @opcode 0x7e
7271 * @opcodesub rex.w=1
7272 * @oppfx 0x66
7273 * @opcpuid sse2
7274 * @opgroup og_sse2_simdint_datamove
7275 * @opxcpttype 5
7276 * @optest 64-bit / op1=1 op2=2 -> op1=2
7277 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7278 */
7279 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7280 if (IEM_IS_MODRM_REG_MODE(bRm))
7281 {
7282 /* greg64, XMM */
7283 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7285 IEM_MC_LOCAL(uint64_t, u64Tmp);
7286
7287 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7288 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7289
7290 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7291 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7292
7293 IEM_MC_ADVANCE_RIP_AND_FINISH();
7294 IEM_MC_END();
7295 }
7296 else
7297 {
7298 /* [mem64], XMM */
7299 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7301 IEM_MC_LOCAL(uint64_t, u64Tmp);
7302
7303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7305 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7306 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7307
7308 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7309 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7310
7311 IEM_MC_ADVANCE_RIP_AND_FINISH();
7312 IEM_MC_END();
7313 }
7314 }
7315 else
7316 {
7317 /**
7318 * @opdone
7319 * @opcode 0x7e
7320 * @opcodesub rex.w=0
7321 * @oppfx 0x66
7322 * @opcpuid sse2
7323 * @opgroup og_sse2_simdint_datamove
7324 * @opxcpttype 5
7325 * @opfunction iemOp_movd_q_Ey_Vy
7326 * @optest op1=1 op2=2 -> op1=2
7327 * @optest op1=0 op2=-42 -> op1=-42
7328 */
7329 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7330 if (IEM_IS_MODRM_REG_MODE(bRm))
7331 {
7332 /* greg32, XMM */
7333 IEM_MC_BEGIN(0, 0);
7334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7335 IEM_MC_LOCAL(uint32_t, u32Tmp);
7336
7337 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7338 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7339
7340 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7341 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7342
7343 IEM_MC_ADVANCE_RIP_AND_FINISH();
7344 IEM_MC_END();
7345 }
7346 else
7347 {
7348 /* [mem32], XMM */
7349 IEM_MC_BEGIN(0, 0);
7350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7351 IEM_MC_LOCAL(uint32_t, u32Tmp);
7352
7353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7356 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7357
7358 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7359 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7360
7361 IEM_MC_ADVANCE_RIP_AND_FINISH();
7362 IEM_MC_END();
7363 }
7364 }
7365}
7366
7367/**
7368 * @opcode 0x7e
7369 * @oppfx 0xf3
7370 * @opcpuid sse2
7371 * @opgroup og_sse2_pcksclr_datamove
7372 * @opxcpttype none
7373 * @optest op1=1 op2=2 -> op1=2
7374 * @optest op1=0 op2=-42 -> op1=-42
7375 */
7376FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7377{
7378 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
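    /* Unlike the MMX forms above, this always zero extends the 64-bit value
       to the full 128-bit destination register (VqZx_WO). */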
7380 if (IEM_IS_MODRM_REG_MODE(bRm))
7381 {
7382 /*
7383 * XMM128, XMM64.
7384 */
7385 IEM_MC_BEGIN(0, 0);
7386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7387 IEM_MC_LOCAL(uint64_t, uSrc);
7388
7389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7390 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7391
7392 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7393 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7394
7395 IEM_MC_ADVANCE_RIP_AND_FINISH();
7396 IEM_MC_END();
7397 }
7398 else
7399 {
7400 /*
7401 * XMM128, [mem64].
7402 */
7403 IEM_MC_BEGIN(0, 0);
7404 IEM_MC_LOCAL(uint64_t, uSrc);
7405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7406
7407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7409 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7410 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7411
7412 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7413 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7414
7415 IEM_MC_ADVANCE_RIP_AND_FINISH();
7416 IEM_MC_END();
7417 }
7418}
7419
7420/* Opcode 0xf2 0x0f 0x7e - invalid */
7421
7422
7423/** Opcode 0x0f 0x7f - movq Qq, Pq */
7424FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7425{
7426 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7428 if (IEM_IS_MODRM_REG_MODE(bRm))
7429 {
7430 /*
7431 * MMX, MMX.
7432 */
7433 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7434 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7435 IEM_MC_BEGIN(0, 0);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7437 IEM_MC_LOCAL(uint64_t, u64Tmp);
7438 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7439 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7440 IEM_MC_FPU_TO_MMX_MODE();
7441
7442 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7443 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7444
7445 IEM_MC_ADVANCE_RIP_AND_FINISH();
7446 IEM_MC_END();
7447 }
7448 else
7449 {
7450 /*
7451 * [mem64], MMX.
7452 */
7453 IEM_MC_BEGIN(0, 0);
7454 IEM_MC_LOCAL(uint64_t, u64Tmp);
7455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7456
7457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7459 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7460 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7461
7462 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7463 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7464 IEM_MC_FPU_TO_MMX_MODE();
7465
7466 IEM_MC_ADVANCE_RIP_AND_FINISH();
7467 IEM_MC_END();
7468 }
7469}
7470
7471/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7472FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7473{
7474 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7476 if (IEM_IS_MODRM_REG_MODE(bRm))
7477 {
7478 /*
7479 * XMM, XMM.
7480 */
7481 IEM_MC_BEGIN(0, 0);
7482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7483 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7485 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7486 IEM_GET_MODRM_REG(pVCpu, bRm));
7487 IEM_MC_ADVANCE_RIP_AND_FINISH();
7488 IEM_MC_END();
7489 }
7490 else
7491 {
7492 /*
7493 * [mem128], XMM.
7494 */
7495 IEM_MC_BEGIN(0, 0);
7496 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7498
7499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7503
7504 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
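        /* movdqa requires a 16 byte aligned memory operand and raises
           #GP(0) otherwise, hence the aligned SSE store. */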
7505 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7506
7507 IEM_MC_ADVANCE_RIP_AND_FINISH();
7508 IEM_MC_END();
7509 }
7510}
7511
7512/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7513FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7514{
7515 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7517 if (IEM_IS_MODRM_REG_MODE(bRm))
7518 {
7519 /*
7520 * XMM, XMM.
7521 */
7522 IEM_MC_BEGIN(0, 0);
7523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7526 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7527 IEM_GET_MODRM_REG(pVCpu, bRm));
7528 IEM_MC_ADVANCE_RIP_AND_FINISH();
7529 IEM_MC_END();
7530 }
7531 else
7532 {
7533 /*
7534 * [mem128], XMM.
7535 */
7536 IEM_MC_BEGIN(0, 0);
7537 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7539
7540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7542 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7543 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7544
7545 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
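        /* movdqu has no 16 byte alignment requirement, so the unchecked
           store variant is used here. */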
7546 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7547
7548 IEM_MC_ADVANCE_RIP_AND_FINISH();
7549 IEM_MC_END();
7550 }
7551}
7552
7553/* Opcode 0xf2 0x0f 0x7f - invalid */
7554
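/*
 * Two-byte Jcc (0x0f 0x80 thru 0x0f 0x8f), i.e. conditional jumps with a
 * 16- or 32-bit displacement.  In 64-bit mode the operand size defaults to
 * 64 bits while the displacement stays a sign-extended 32-bit immediate,
 * which is why the 32-bit path in each handler below also serves 64-bit
 * code.
 */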
7555
7556/**
7557 * @opcode 0x80
7558 * @opfltest of
7559 */
7560FNIEMOP_DEF(iemOp_jo_Jv)
7561{
7562 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7563 IEMOP_HLP_MIN_386();
7564 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7565 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7566 {
7567 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7568 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7571 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7572 } IEM_MC_ELSE() {
7573 IEM_MC_ADVANCE_RIP_AND_FINISH();
7574 } IEM_MC_ENDIF();
7575 IEM_MC_END();
7576 }
7577 else
7578 {
7579 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7580 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7583 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7584 } IEM_MC_ELSE() {
7585 IEM_MC_ADVANCE_RIP_AND_FINISH();
7586 } IEM_MC_ENDIF();
7587 IEM_MC_END();
7588 }
7589}
7590
7591
7592/**
7593 * @opcode 0x81
7594 * @opfltest of
7595 */
7596FNIEMOP_DEF(iemOp_jno_Jv)
7597{
7598 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7599 IEMOP_HLP_MIN_386();
7600 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7601 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7602 {
7603 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7604 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7607 IEM_MC_ADVANCE_RIP_AND_FINISH();
7608 } IEM_MC_ELSE() {
7609 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7610 } IEM_MC_ENDIF();
7611 IEM_MC_END();
7612 }
7613 else
7614 {
7615 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7616 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7619 IEM_MC_ADVANCE_RIP_AND_FINISH();
7620 } IEM_MC_ELSE() {
7621 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7622 } IEM_MC_ENDIF();
7623 IEM_MC_END();
7624 }
7625}
7626
7627
7628/**
7629 * @opcode 0x82
7630 * @opfltest cf
7631 */
7632FNIEMOP_DEF(iemOp_jc_Jv)
7633{
7634 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7635 IEMOP_HLP_MIN_386();
7636 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7637 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7638 {
7639 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7640 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7643 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7644 } IEM_MC_ELSE() {
7645 IEM_MC_ADVANCE_RIP_AND_FINISH();
7646 } IEM_MC_ENDIF();
7647 IEM_MC_END();
7648 }
7649 else
7650 {
7651 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7652 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7655 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7656 } IEM_MC_ELSE() {
7657 IEM_MC_ADVANCE_RIP_AND_FINISH();
7658 } IEM_MC_ENDIF();
7659 IEM_MC_END();
7660 }
7661}
7662
7663
7664/**
7665 * @opcode 0x83
7666 * @opfltest cf
7667 */
7668FNIEMOP_DEF(iemOp_jnc_Jv)
7669{
7670 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7671 IEMOP_HLP_MIN_386();
7672 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7673 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7674 {
7675 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7676 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7679 IEM_MC_ADVANCE_RIP_AND_FINISH();
7680 } IEM_MC_ELSE() {
7681 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7682 } IEM_MC_ENDIF();
7683 IEM_MC_END();
7684 }
7685 else
7686 {
7687 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7688 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7690 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7691 IEM_MC_ADVANCE_RIP_AND_FINISH();
7692 } IEM_MC_ELSE() {
7693 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7694 } IEM_MC_ENDIF();
7695 IEM_MC_END();
7696 }
7697}
7698
7699
7700/**
7701 * @opcode 0x84
7702 * @opfltest zf
7703 */
7704FNIEMOP_DEF(iemOp_je_Jv)
7705{
7706 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7707 IEMOP_HLP_MIN_386();
7708 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7709 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7710 {
7711 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7712 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7715 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7716 } IEM_MC_ELSE() {
7717 IEM_MC_ADVANCE_RIP_AND_FINISH();
7718 } IEM_MC_ENDIF();
7719 IEM_MC_END();
7720 }
7721 else
7722 {
7723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7724 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7727 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7728 } IEM_MC_ELSE() {
7729 IEM_MC_ADVANCE_RIP_AND_FINISH();
7730 } IEM_MC_ENDIF();
7731 IEM_MC_END();
7732 }
7733}
7734
7735
7736/**
7737 * @opcode 0x85
7738 * @opfltest zf
7739 */
7740FNIEMOP_DEF(iemOp_jne_Jv)
7741{
7742 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7743 IEMOP_HLP_MIN_386();
7744 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7745 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7746 {
7747 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7748 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7751 IEM_MC_ADVANCE_RIP_AND_FINISH();
7752 } IEM_MC_ELSE() {
7753 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7754 } IEM_MC_ENDIF();
7755 IEM_MC_END();
7756 }
7757 else
7758 {
7759 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7760 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7763 IEM_MC_ADVANCE_RIP_AND_FINISH();
7764 } IEM_MC_ELSE() {
7765 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7766 } IEM_MC_ENDIF();
7767 IEM_MC_END();
7768 }
7769}
7770
7771
7772/**
7773 * @opcode 0x86
7774 * @opfltest cf,zf
7775 */
7776FNIEMOP_DEF(iemOp_jbe_Jv)
7777{
7778 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7779 IEMOP_HLP_MIN_386();
7780 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7781 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7782 {
7783 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7784 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7786 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7787 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7788 } IEM_MC_ELSE() {
7789 IEM_MC_ADVANCE_RIP_AND_FINISH();
7790 } IEM_MC_ENDIF();
7791 IEM_MC_END();
7792 }
7793 else
7794 {
7795 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7796 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7799 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7800 } IEM_MC_ELSE() {
7801 IEM_MC_ADVANCE_RIP_AND_FINISH();
7802 } IEM_MC_ENDIF();
7803 IEM_MC_END();
7804 }
7805}
7806
7807
7808/**
7809 * @opcode 0x87
7810 * @opfltest cf,zf
7811 */
7812FNIEMOP_DEF(iemOp_jnbe_Jv)
7813{
7814 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7815 IEMOP_HLP_MIN_386();
7816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7817 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7818 {
7819 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7820 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7822 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7823 IEM_MC_ADVANCE_RIP_AND_FINISH();
7824 } IEM_MC_ELSE() {
7825 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7826 } IEM_MC_ENDIF();
7827 IEM_MC_END();
7828 }
7829 else
7830 {
7831 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7832 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7835 IEM_MC_ADVANCE_RIP_AND_FINISH();
7836 } IEM_MC_ELSE() {
7837 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7838 } IEM_MC_ENDIF();
7839 IEM_MC_END();
7840 }
7841}
7842
7843
7844/**
7845 * @opcode 0x88
7846 * @opfltest sf
7847 */
7848FNIEMOP_DEF(iemOp_js_Jv)
7849{
7850 IEMOP_MNEMONIC(js_Jv, "js Jv");
7851 IEMOP_HLP_MIN_386();
7852 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7853 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7854 {
7855 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7856 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7858 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7859 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7860 } IEM_MC_ELSE() {
7861 IEM_MC_ADVANCE_RIP_AND_FINISH();
7862 } IEM_MC_ENDIF();
7863 IEM_MC_END();
7864 }
7865 else
7866 {
7867 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7868 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7871 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7872 } IEM_MC_ELSE() {
7873 IEM_MC_ADVANCE_RIP_AND_FINISH();
7874 } IEM_MC_ENDIF();
7875 IEM_MC_END();
7876 }
7877}
7878
7879
7880/**
7881 * @opcode 0x89
7882 * @opfltest sf
7883 */
7884FNIEMOP_DEF(iemOp_jns_Jv)
7885{
7886 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7887 IEMOP_HLP_MIN_386();
7888 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7889 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7890 {
7891 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7892 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7895 IEM_MC_ADVANCE_RIP_AND_FINISH();
7896 } IEM_MC_ELSE() {
7897 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7898 } IEM_MC_ENDIF();
7899 IEM_MC_END();
7900 }
7901 else
7902 {
7903 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7904 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7906 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7907 IEM_MC_ADVANCE_RIP_AND_FINISH();
7908 } IEM_MC_ELSE() {
7909 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7910 } IEM_MC_ENDIF();
7911 IEM_MC_END();
7912 }
7913}
7914
7915
7916/**
7917 * @opcode 0x8a
7918 * @opfltest pf
7919 */
7920FNIEMOP_DEF(iemOp_jp_Jv)
7921{
7922 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7923 IEMOP_HLP_MIN_386();
7924 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7925 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7926 {
7927 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7928 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7931 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7932 } IEM_MC_ELSE() {
7933 IEM_MC_ADVANCE_RIP_AND_FINISH();
7934 } IEM_MC_ENDIF();
7935 IEM_MC_END();
7936 }
7937 else
7938 {
7939 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7940 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7942 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7943 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7944 } IEM_MC_ELSE() {
7945 IEM_MC_ADVANCE_RIP_AND_FINISH();
7946 } IEM_MC_ENDIF();
7947 IEM_MC_END();
7948 }
7949}
7950
7951
7952/**
7953 * @opcode 0x8b
7954 * @opfltest pf
7955 */
7956FNIEMOP_DEF(iemOp_jnp_Jv)
7957{
7958 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7959 IEMOP_HLP_MIN_386();
7960 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7961 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7962 {
7963 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7964 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7966 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7967 IEM_MC_ADVANCE_RIP_AND_FINISH();
7968 } IEM_MC_ELSE() {
7969 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7970 } IEM_MC_ENDIF();
7971 IEM_MC_END();
7972 }
7973 else
7974 {
7975 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7976 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7979 IEM_MC_ADVANCE_RIP_AND_FINISH();
7980 } IEM_MC_ELSE() {
7981 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7982 } IEM_MC_ENDIF();
7983 IEM_MC_END();
7984 }
7985}
7986
7987
7988/**
7989 * @opcode 0x8c
7990 * @opfltest sf,of
7991 */
7992FNIEMOP_DEF(iemOp_jl_Jv)
7993{
7994 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7995 IEMOP_HLP_MIN_386();
7996 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7997 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7998 {
7999 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8000 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8002 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8003 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8004 } IEM_MC_ELSE() {
8005 IEM_MC_ADVANCE_RIP_AND_FINISH();
8006 } IEM_MC_ENDIF();
8007 IEM_MC_END();
8008 }
8009 else
8010 {
8011 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8012 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8014 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8015 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8016 } IEM_MC_ELSE() {
8017 IEM_MC_ADVANCE_RIP_AND_FINISH();
8018 } IEM_MC_ENDIF();
8019 IEM_MC_END();
8020 }
8021}
8022
8023
8024/**
8025 * @opcode 0x8d
8026 * @opfltest sf,of
8027 */
8028FNIEMOP_DEF(iemOp_jnl_Jv)
8029{
8030 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8031 IEMOP_HLP_MIN_386();
8032 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8033 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8034 {
8035 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8036 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8038 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8039 IEM_MC_ADVANCE_RIP_AND_FINISH();
8040 } IEM_MC_ELSE() {
8041 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8042 } IEM_MC_ENDIF();
8043 IEM_MC_END();
8044 }
8045 else
8046 {
8047 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8048 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8051 IEM_MC_ADVANCE_RIP_AND_FINISH();
8052 } IEM_MC_ELSE() {
8053 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8054 } IEM_MC_ENDIF();
8055 IEM_MC_END();
8056 }
8057}
8058
8059
8060/**
8061 * @opcode 0x8e
8062 * @opfltest zf,sf,of
8063 */
8064FNIEMOP_DEF(iemOp_jle_Jv)
8065{
8066 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8067 IEMOP_HLP_MIN_386();
8068 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8069 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8070 {
8071 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8072 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8074 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8075 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8076 } IEM_MC_ELSE() {
8077 IEM_MC_ADVANCE_RIP_AND_FINISH();
8078 } IEM_MC_ENDIF();
8079 IEM_MC_END();
8080 }
8081 else
8082 {
8083 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8084 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8087 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8088 } IEM_MC_ELSE() {
8089 IEM_MC_ADVANCE_RIP_AND_FINISH();
8090 } IEM_MC_ENDIF();
8091 IEM_MC_END();
8092 }
8093}
8094
8095
8096/**
8097 * @opcode 0x8f
8098 * @opfltest zf,sf,of
8099 */
8100FNIEMOP_DEF(iemOp_jnle_Jv)
8101{
8102 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8103 IEMOP_HLP_MIN_386();
8104 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8105 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8106 {
8107 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8108 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8110 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8111 IEM_MC_ADVANCE_RIP_AND_FINISH();
8112 } IEM_MC_ELSE() {
8113 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8114 } IEM_MC_ENDIF();
8115 IEM_MC_END();
8116 }
8117 else
8118 {
8119 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8120 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8123 IEM_MC_ADVANCE_RIP_AND_FINISH();
8124 } IEM_MC_ELSE() {
8125 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8126 } IEM_MC_ENDIF();
8127 IEM_MC_END();
8128 }
8129}
8130
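/*
 * SETcc Eb (0x0f 0x90 thru 0x0f 0x9f): store 1 in the byte destination when
 * the condition holds and 0 otherwise, e.g. 'sete al' after a CMP leaves
 * al=1 exactly when the compared operands were equal.
 */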
8131
8132/**
8133 * @opcode 0x90
8134 * @opfltest of
8135 */
8136FNIEMOP_DEF(iemOp_seto_Eb)
8137{
8138 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8139 IEMOP_HLP_MIN_386();
8140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8141
8142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8143 * any way. AMD says it's "unused", whatever that means. We're
8144 * ignoring it for now. */
8145 if (IEM_IS_MODRM_REG_MODE(bRm))
8146 {
8147 /* register target */
8148 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8150 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8151 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8152 } IEM_MC_ELSE() {
8153 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8154 } IEM_MC_ENDIF();
8155 IEM_MC_ADVANCE_RIP_AND_FINISH();
8156 IEM_MC_END();
8157 }
8158 else
8159 {
8160 /* memory target */
8161 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8165 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8167 } IEM_MC_ELSE() {
8168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8169 } IEM_MC_ENDIF();
8170 IEM_MC_ADVANCE_RIP_AND_FINISH();
8171 IEM_MC_END();
8172 }
8173}
8174
8175
8176/**
8177 * @opcode 0x91
8178 * @opfltest of
8179 */
8180FNIEMOP_DEF(iemOp_setno_Eb)
8181{
8182 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8183 IEMOP_HLP_MIN_386();
8184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8185
8186 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8187 * any way. AMD says it's "unused", whatever that means. We're
8188 * ignoring it for now. */
8189 if (IEM_IS_MODRM_REG_MODE(bRm))
8190 {
8191 /* register target */
8192 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8194 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8195 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8196 } IEM_MC_ELSE() {
8197 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8198 } IEM_MC_ENDIF();
8199 IEM_MC_ADVANCE_RIP_AND_FINISH();
8200 IEM_MC_END();
8201 }
8202 else
8203 {
8204 /* memory target */
8205 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8211 } IEM_MC_ELSE() {
8212 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8213 } IEM_MC_ENDIF();
8214 IEM_MC_ADVANCE_RIP_AND_FINISH();
8215 IEM_MC_END();
8216 }
8217}
8218
8219
8220/**
8221 * @opcode 0x92
8222 * @opfltest cf
8223 */
8224FNIEMOP_DEF(iemOp_setc_Eb)
8225{
8226 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8227 IEMOP_HLP_MIN_386();
8228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8229
8230 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8231 * any way. AMD says it's "unused", whatever that means. We're
8232 * ignoring it for now. */
8233 if (IEM_IS_MODRM_REG_MODE(bRm))
8234 {
8235 /* register target */
8236 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8238 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8239 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8240 } IEM_MC_ELSE() {
8241 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8242 } IEM_MC_ENDIF();
8243 IEM_MC_ADVANCE_RIP_AND_FINISH();
8244 IEM_MC_END();
8245 }
8246 else
8247 {
8248 /* memory target */
8249 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8253 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8254 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8255 } IEM_MC_ELSE() {
8256 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8257 } IEM_MC_ENDIF();
8258 IEM_MC_ADVANCE_RIP_AND_FINISH();
8259 IEM_MC_END();
8260 }
8261}
8262
8263
8264/**
8265 * @opcode 0x93
8266 * @opfltest cf
8267 */
8268FNIEMOP_DEF(iemOp_setnc_Eb)
8269{
8270 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8271 IEMOP_HLP_MIN_386();
8272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8273
8274 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8275 * any way. AMD says it's "unused", whatever that means. We're
8276 * ignoring it for now. */
8277 if (IEM_IS_MODRM_REG_MODE(bRm))
8278 {
8279 /* register target */
8280 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8283 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8284 } IEM_MC_ELSE() {
8285 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8286 } IEM_MC_ENDIF();
8287 IEM_MC_ADVANCE_RIP_AND_FINISH();
8288 IEM_MC_END();
8289 }
8290 else
8291 {
8292 /* memory target */
8293 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8297 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8298 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8299 } IEM_MC_ELSE() {
8300 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8301 } IEM_MC_ENDIF();
8302 IEM_MC_ADVANCE_RIP_AND_FINISH();
8303 IEM_MC_END();
8304 }
8305}
8306
8307
8308/**
8309 * @opcode 0x94
8310 * @opfltest zf
8311 */
8312FNIEMOP_DEF(iemOp_sete_Eb)
8313{
8314 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8315 IEMOP_HLP_MIN_386();
8316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8317
8318 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8319 * any way. AMD says it's "unused", whatever that means. We're
8320 * ignoring it for now. */
8321 if (IEM_IS_MODRM_REG_MODE(bRm))
8322 {
8323 /* register target */
8324 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8326 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8327 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8328 } IEM_MC_ELSE() {
8329 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8330 } IEM_MC_ENDIF();
8331 IEM_MC_ADVANCE_RIP_AND_FINISH();
8332 IEM_MC_END();
8333 }
8334 else
8335 {
8336 /* memory target */
8337 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8341 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8342 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8343 } IEM_MC_ELSE() {
8344 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8345 } IEM_MC_ENDIF();
8346 IEM_MC_ADVANCE_RIP_AND_FINISH();
8347 IEM_MC_END();
8348 }
8349}
8350
8351
8352/**
8353 * @opcode 0x95
8354 * @opfltest zf
8355 */
8356FNIEMOP_DEF(iemOp_setne_Eb)
8357{
8358 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8359 IEMOP_HLP_MIN_386();
8360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8361
8362 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8363 * any way. AMD says it's "unused", whatever that means. We're
8364 * ignoring it for now. */
8365 if (IEM_IS_MODRM_REG_MODE(bRm))
8366 {
8367 /* register target */
8368 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8370 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8371 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8372 } IEM_MC_ELSE() {
8373 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8374 } IEM_MC_ENDIF();
8375 IEM_MC_ADVANCE_RIP_AND_FINISH();
8376 IEM_MC_END();
8377 }
8378 else
8379 {
8380 /* memory target */
8381 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8385 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8386 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8387 } IEM_MC_ELSE() {
8388 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8389 } IEM_MC_ENDIF();
8390 IEM_MC_ADVANCE_RIP_AND_FINISH();
8391 IEM_MC_END();
8392 }
8393}
8394
8395
8396/**
8397 * @opcode 0x96
8398 * @opfltest cf,zf
8399 */
8400FNIEMOP_DEF(iemOp_setbe_Eb)
8401{
8402 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8403 IEMOP_HLP_MIN_386();
8404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8405
8406 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8407 * any way. AMD says it's "unused", whatever that means. We're
8408 * ignoring it for now. */
8409 if (IEM_IS_MODRM_REG_MODE(bRm))
8410 {
8411 /* register target */
8412 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8415 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8416 } IEM_MC_ELSE() {
8417 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8418 } IEM_MC_ENDIF();
8419 IEM_MC_ADVANCE_RIP_AND_FINISH();
8420 IEM_MC_END();
8421 }
8422 else
8423 {
8424 /* memory target */
8425 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8429 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8431 } IEM_MC_ELSE() {
8432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8433 } IEM_MC_ENDIF();
8434 IEM_MC_ADVANCE_RIP_AND_FINISH();
8435 IEM_MC_END();
8436 }
8437}
8438
8439
8440/**
8441 * @opcode 0x97
8442 * @opfltest cf,zf
8443 */
8444FNIEMOP_DEF(iemOp_setnbe_Eb)
8445{
8446 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8447 IEMOP_HLP_MIN_386();
8448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8449
8450 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8451 * any way. AMD says it's "unused", whatever that means. We're
8452 * ignoring it for now. */
8453 if (IEM_IS_MODRM_REG_MODE(bRm))
8454 {
8455 /* register target */
8456 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8458 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8459 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8460 } IEM_MC_ELSE() {
8461 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8462 } IEM_MC_ENDIF();
8463 IEM_MC_ADVANCE_RIP_AND_FINISH();
8464 IEM_MC_END();
8465 }
8466 else
8467 {
8468 /* memory target */
8469 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8473 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8474 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8475 } IEM_MC_ELSE() {
8476 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8477 } IEM_MC_ENDIF();
8478 IEM_MC_ADVANCE_RIP_AND_FINISH();
8479 IEM_MC_END();
8480 }
8481}
8482
8483
8484/**
8485 * @opcode 0x98
8486 * @opfltest sf
8487 */
8488FNIEMOP_DEF(iemOp_sets_Eb)
8489{
8490 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8491 IEMOP_HLP_MIN_386();
8492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8493
8494 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8495 * any way. AMD says it's "unused", whatever that means. We're
8496 * ignoring it for now. */
8497 if (IEM_IS_MODRM_REG_MODE(bRm))
8498 {
8499 /* register target */
8500 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8502 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8503 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8504 } IEM_MC_ELSE() {
8505 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8506 } IEM_MC_ENDIF();
8507 IEM_MC_ADVANCE_RIP_AND_FINISH();
8508 IEM_MC_END();
8509 }
8510 else
8511 {
8512 /* memory target */
8513 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8517 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8519 } IEM_MC_ELSE() {
8520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8521 } IEM_MC_ENDIF();
8522 IEM_MC_ADVANCE_RIP_AND_FINISH();
8523 IEM_MC_END();
8524 }
8525}
8526
8527
8528/**
8529 * @opcode 0x99
8530 * @opfltest sf
8531 */
8532FNIEMOP_DEF(iemOp_setns_Eb)
8533{
8534 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8535 IEMOP_HLP_MIN_386();
8536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8537
8538 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8539 * any way. AMD says it's "unused", whatever that means. We're
8540 * ignoring it for now. */
8541 if (IEM_IS_MODRM_REG_MODE(bRm))
8542 {
8543 /* register target */
8544 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8547 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8548 } IEM_MC_ELSE() {
8549 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8550 } IEM_MC_ENDIF();
8551 IEM_MC_ADVANCE_RIP_AND_FINISH();
8552 IEM_MC_END();
8553 }
8554 else
8555 {
8556 /* memory target */
8557 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8563 } IEM_MC_ELSE() {
8564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8565 } IEM_MC_ENDIF();
8566 IEM_MC_ADVANCE_RIP_AND_FINISH();
8567 IEM_MC_END();
8568 }
8569}
8570
8571
8572/**
8573 * @opcode 0x9a
8574 * @opfltest pf
8575 */
8576FNIEMOP_DEF(iemOp_setp_Eb)
8577{
8578 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8579 IEMOP_HLP_MIN_386();
8580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8581
8582 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8583 * any way. AMD says it's "unused", whatever that means. We're
8584 * ignoring it for now. */
8585 if (IEM_IS_MODRM_REG_MODE(bRm))
8586 {
8587 /* register target */
8588 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8591 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8592 } IEM_MC_ELSE() {
8593 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8594 } IEM_MC_ENDIF();
8595 IEM_MC_ADVANCE_RIP_AND_FINISH();
8596 IEM_MC_END();
8597 }
8598 else
8599 {
8600 /* memory target */
8601 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8605 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8607 } IEM_MC_ELSE() {
8608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8609 } IEM_MC_ENDIF();
8610 IEM_MC_ADVANCE_RIP_AND_FINISH();
8611 IEM_MC_END();
8612 }
8613}
8614
8615
8616/**
8617 * @opcode 0x9b
8618 * @opfltest pf
8619 */
8620FNIEMOP_DEF(iemOp_setnp_Eb)
8621{
8622 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8623 IEMOP_HLP_MIN_386();
8624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8625
8626 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8627 * any way. AMD says it's "unused", whatever that means. We're
8628 * ignoring it for now. */
8629 if (IEM_IS_MODRM_REG_MODE(bRm))
8630 {
8631 /* register target */
8632 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8635 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8636 } IEM_MC_ELSE() {
8637 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8638 } IEM_MC_ENDIF();
8639 IEM_MC_ADVANCE_RIP_AND_FINISH();
8640 IEM_MC_END();
8641 }
8642 else
8643 {
8644 /* memory target */
8645 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8651 } IEM_MC_ELSE() {
8652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8653 } IEM_MC_ENDIF();
8654 IEM_MC_ADVANCE_RIP_AND_FINISH();
8655 IEM_MC_END();
8656 }
8657}
8658
8659
8660/**
8661 * @opcode 0x9c
8662 * @opfltest sf,of
8663 */
8664FNIEMOP_DEF(iemOp_setl_Eb)
8665{
8666 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8667 IEMOP_HLP_MIN_386();
8668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8669
8670 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8671 * any way. AMD says it's "unused", whatever that means. We're
8672 * ignoring it for now. */
8673 if (IEM_IS_MODRM_REG_MODE(bRm))
8674 {
8675 /* register target */
8676 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8678 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8679 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8680 } IEM_MC_ELSE() {
8681 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8682 } IEM_MC_ENDIF();
8683 IEM_MC_ADVANCE_RIP_AND_FINISH();
8684 IEM_MC_END();
8685 }
8686 else
8687 {
8688 /* memory target */
8689 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8693 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8695 } IEM_MC_ELSE() {
8696 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8697 } IEM_MC_ENDIF();
8698 IEM_MC_ADVANCE_RIP_AND_FINISH();
8699 IEM_MC_END();
8700 }
8701}
8702
8703
8704/**
8705 * @opcode 0x9d
8706 * @opfltest sf,of
8707 */
8708FNIEMOP_DEF(iemOp_setnl_Eb)
8709{
8710 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8711 IEMOP_HLP_MIN_386();
8712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8713
8714 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8715 * any way. AMD says it's "unused", whatever that means. We're
8716 * ignoring it for now. */
8717 if (IEM_IS_MODRM_REG_MODE(bRm))
8718 {
8719 /* register target */
8720 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8722 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8723 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8724 } IEM_MC_ELSE() {
8725 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8726 } IEM_MC_ENDIF();
8727 IEM_MC_ADVANCE_RIP_AND_FINISH();
8728 IEM_MC_END();
8729 }
8730 else
8731 {
8732 /* memory target */
8733 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8737 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8739 } IEM_MC_ELSE() {
8740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8741 } IEM_MC_ENDIF();
8742 IEM_MC_ADVANCE_RIP_AND_FINISH();
8743 IEM_MC_END();
8744 }
8745}
8746
8747
8748/**
8749 * @opcode 0x9e
8750 * @opfltest zf,sf,of
8751 */
8752FNIEMOP_DEF(iemOp_setle_Eb)
8753{
8754 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8755 IEMOP_HLP_MIN_386();
8756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8757
8758 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8759 * any way. AMD says it's "unused", whatever that means. We're
8760 * ignoring it for now. */
8761 if (IEM_IS_MODRM_REG_MODE(bRm))
8762 {
8763 /* register target */
8764 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8766 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8768 } IEM_MC_ELSE() {
8769 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8770 } IEM_MC_ENDIF();
8771 IEM_MC_ADVANCE_RIP_AND_FINISH();
8772 IEM_MC_END();
8773 }
8774 else
8775 {
8776 /* memory target */
8777 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8781 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8783 } IEM_MC_ELSE() {
8784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8785 } IEM_MC_ENDIF();
8786 IEM_MC_ADVANCE_RIP_AND_FINISH();
8787 IEM_MC_END();
8788 }
8789}
8790
8791
8792/**
8793 * @opcode 0x9f
8794 * @opfltest zf,sf,of
8795 */
8796FNIEMOP_DEF(iemOp_setnle_Eb)
8797{
8798 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8799 IEMOP_HLP_MIN_386();
8800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8801
8802 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8803 * any way. AMD says it's "unused", whatever that means. We're
8804 * ignoring it for now. */
8805 if (IEM_IS_MODRM_REG_MODE(bRm))
8806 {
8807 /* register target */
8808 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8810 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8811 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8812 } IEM_MC_ELSE() {
8813 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8814 } IEM_MC_ENDIF();
8815 IEM_MC_ADVANCE_RIP_AND_FINISH();
8816 IEM_MC_END();
8817 }
8818 else
8819 {
8820 /* memory target */
8821 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8825 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8827 } IEM_MC_ELSE() {
8828 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8829 } IEM_MC_ENDIF();
8830 IEM_MC_ADVANCE_RIP_AND_FINISH();
8831 IEM_MC_END();
8832 }
8833}
8834
8835
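/*
 * Sidebar, illustrative only: each SETcc worker above boils down to a
 * predicate over EFLAGS.  A minimal C sketch of the two predicates used by
 * setle/setnle (function names are made up for illustration; the emulator
 * evaluates the flags inline via the IEM_MC_IF_* macros):
 *
 *     static uint8_t sketchSetle(uint32_t fEfl)   // 0f 9e: ZF=1 or SF!=OF
 *     {
 *         int const fSf = !!(fEfl & X86_EFL_SF);
 *         int const fOf = !!(fEfl & X86_EFL_OF);
 *         return (fEfl & X86_EFL_ZF) || fSf != fOf ? 1 : 0;
 *     }
 *
 *     static uint8_t sketchSetnle(uint32_t fEfl)  // 0f 9f: the inverse
 *     {
 *         return sketchSetle(fEfl) ^ 1;
 *     }
 */
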
8836/** Opcode 0x0f 0xa0. */
8837FNIEMOP_DEF(iemOp_push_fs)
8838{
8839 IEMOP_MNEMONIC(push_fs, "push fs");
8840 IEMOP_HLP_MIN_386();
8841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8842 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8843}
8844
8845
8846/** Opcode 0x0f 0xa1. */
8847FNIEMOP_DEF(iemOp_pop_fs)
8848{
8849 IEMOP_MNEMONIC(pop_fs, "pop fs");
8850 IEMOP_HLP_MIN_386();
8851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8852 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8853 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8855 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8856 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8857 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8858 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8859 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8860}
8861
8862
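/*
 * A brief note on the second argument to IEM_MC_DEFER_TO_CIMPL_2_RET above
 * (a sketch of the idea; see the IEM_MC_DEFER_TO_CIMPL_* definitions for
 * the authoritative story): the RT_BIT_64(kIemNativeGstReg_*) mask lists
 * the shadowed guest registers the C implementation may modify, so the
 * native recompiler can flush their host register copies before the call.
 * For 'pop fs' that is RSP plus all four tracked parts of FS.  E.g.:
 *
 *     // marking just RSP as possibly modified:
 *     uint64_t const fGstShwFlush = RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP);
 */
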
8863/** Opcode 0x0f 0xa2. */
8864FNIEMOP_DEF(iemOp_cpuid)
8865{
8866 IEMOP_MNEMONIC(cpuid, "cpuid");
8867 IEMOP_HLP_MIN_486(); /* not all 486 models have CPUID. */
8868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8869 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8870 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8871 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8872 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8873 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8874 iemCImpl_cpuid);
8875}
8876
8877
8878/**
8879 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8880 * iemOp_bts_Ev_Gv.
8881 */
8882
8883#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8885 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8886 \
8887 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8888 { \
8889 /* register destination. */ \
8890 switch (pVCpu->iem.s.enmEffOpSize) \
8891 { \
8892 case IEMMODE_16BIT: \
8893 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8895 \
8896 IEM_MC_ARG(uint16_t, u16Src, 2); \
8897 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8898 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8899 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8900 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8901 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8902 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8903 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8904 \
8905 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8906 IEM_MC_END(); \
8907 break; \
8908 \
8909 case IEMMODE_32BIT: \
8910 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8912 \
8913 IEM_MC_ARG(uint32_t, u32Src, 2); \
8914 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8915 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8916 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8917 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8918 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8919 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8920 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8921 \
8922 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8923 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8924 IEM_MC_END(); \
8925 break; \
8926 \
8927 case IEMMODE_64BIT: \
8928 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8930 \
8931 IEM_MC_ARG(uint64_t, u64Src, 2); \
8932 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8933 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8934 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8935 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8936 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8937 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8938 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8939 \
8940 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8941 IEM_MC_END(); \
8942 break; \
8943 \
8944 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8945 } \
8946 } \
8947 else \
8948 { \
8949 /* memory destination. */ \
8950 /** @todo test negative bit offsets! */ \
8951 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8952 { \
8953 switch (pVCpu->iem.s.enmEffOpSize) \
8954 { \
8955 case IEMMODE_16BIT: \
8956 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8959 IEMOP_HLP_DONE_DECODING(); \
8960 \
8961 IEM_MC_ARG(uint16_t, u16Src, 2); \
8962 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8963 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8964 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8965 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8966 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8967 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8968 \
8969 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8970 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8971 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8972 \
8973 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8974 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8975 \
8976 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8977 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8978 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8979 IEM_MC_END(); \
8980 break; \
8981 \
8982 case IEMMODE_32BIT: \
8983 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8986 IEMOP_HLP_DONE_DECODING(); \
8987 \
8988 IEM_MC_ARG(uint32_t, u32Src, 2); \
8989 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8990 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8991 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8992 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8993 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8994 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8995 \
8996 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8997 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8998 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8999 \
9000 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9001 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9002 \
9003 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9004 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9005 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9006 IEM_MC_END(); \
9007 break; \
9008 \
9009 case IEMMODE_64BIT: \
9010 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9013 IEMOP_HLP_DONE_DECODING(); \
9014 \
9015 IEM_MC_ARG(uint64_t, u64Src, 2); \
9016 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9017 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9018 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9019 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9020 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9021 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9022 \
9023 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9024 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9025 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9026 \
9027 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9028 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9029 \
9030 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9031 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9032 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9033 IEM_MC_END(); \
9034 break; \
9035 \
9036 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9037 } \
9038 } \
9039 else \
9040 { \
9041 (void)0
9042/* Separate macro to work around a parsing issue in IEMAllInstPython.py. */
9043#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9044 switch (pVCpu->iem.s.enmEffOpSize) \
9045 { \
9046 case IEMMODE_16BIT: \
9047 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9050 IEMOP_HLP_DONE_DECODING(); \
9051 \
9052 IEM_MC_ARG(uint16_t, u16Src, 2); \
9053 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9054 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9055 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9056 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9057 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9058 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9059 \
9060 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9061 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9062 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9063 \
9064 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9065 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9066 \
9067 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9068 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9070 IEM_MC_END(); \
9071 break; \
9072 \
9073 case IEMMODE_32BIT: \
9074 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9077 IEMOP_HLP_DONE_DECODING(); \
9078 \
9079 IEM_MC_ARG(uint32_t, u32Src, 2); \
9080 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9081 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9082 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9083 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9084 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9085 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9086 \
9087 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9088 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9089 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9090 \
9091 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9092 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9093 \
9094 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9095 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9096 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9097 IEM_MC_END(); \
9098 break; \
9099 \
9100 case IEMMODE_64BIT: \
9101 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9104 IEMOP_HLP_DONE_DECODING(); \
9105 \
9106 IEM_MC_ARG(uint64_t, u64Src, 2); \
9107 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9108 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9109 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9110 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9111 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9112 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9113 \
9114 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9115 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9116 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9117 \
9118 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9119 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9120 \
9121 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9122 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9123 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9124 IEM_MC_END(); \
9125 break; \
9126 \
9127 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9128 } \
9129 } \
9130 } \
9131 (void)0
9132
9133/* Read-only version (bt). */
9134#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9136 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9137 \
9138 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9139 { \
9140 /* register destination. */ \
9141 switch (pVCpu->iem.s.enmEffOpSize) \
9142 { \
9143 case IEMMODE_16BIT: \
9144 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9146 \
9147 IEM_MC_ARG(uint16_t, u16Src, 2); \
9148 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9149 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9150 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9151 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9152 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9153 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9154 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9155 \
9156 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9157 IEM_MC_END(); \
9158 break; \
9159 \
9160 case IEMMODE_32BIT: \
9161 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9163 \
9164 IEM_MC_ARG(uint32_t, u32Src, 2); \
9165 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9166 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9167 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9168 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9169 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9170 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9171 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9172 \
9173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9174 IEM_MC_END(); \
9175 break; \
9176 \
9177 case IEMMODE_64BIT: \
9178 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9180 \
9181 IEM_MC_ARG(uint64_t, u64Src, 2); \
9182 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9183 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9184 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9185 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9186 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9187 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9188 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9189 \
9190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9191 IEM_MC_END(); \
9192 break; \
9193 \
9194 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9195 } \
9196 } \
9197 else \
9198 { \
9199 /* memory destination. */ \
9200 /** @todo test negative bit offsets! */ \
9201 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9202 { \
9203 switch (pVCpu->iem.s.enmEffOpSize) \
9204 { \
9205 case IEMMODE_16BIT: \
9206 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9209 IEMOP_HLP_DONE_DECODING(); \
9210 \
9211 IEM_MC_ARG(uint16_t, u16Src, 2); \
9212 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9213 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9214 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9215 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9216 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9217 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9218 \
9219 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9220 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9221 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9222 \
9223 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9224 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9225 \
9226 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9227 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9228 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9229 IEM_MC_END(); \
9230 break; \
9231 \
9232 case IEMMODE_32BIT: \
9233 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9236 IEMOP_HLP_DONE_DECODING(); \
9237 \
9238 IEM_MC_ARG(uint32_t, u32Src, 2); \
9239 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9240 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9241 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9242 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9243 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9244 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9245 \
9246 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9247 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9248 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9249 \
9250 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9251 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9252 \
9253 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9254 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9255 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9256 IEM_MC_END(); \
9257 break; \
9258 \
9259 case IEMMODE_64BIT: \
9260 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9263 IEMOP_HLP_DONE_DECODING(); \
9264 \
9265 IEM_MC_ARG(uint64_t, u64Src, 2); \
9266 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9267 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9268 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9269 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9270 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9271 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9272 \
9273 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9274 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9275 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9276 \
9277 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9278 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9279 \
9280 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9281 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9282 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9283 IEM_MC_END(); \
9284 break; \
9285 \
9286 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9287 } \
9288 } \
9289 else \
9290 { \
9291 IEMOP_HLP_DONE_DECODING(); \
9292 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9293 } \
9294 } \
9295 (void)0
9296
9297
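/*
 * How the memory forms in the three IEMOP_BODY_BIT_Ev_Gv_* macros above
 * turn the Gv operand into an address plus a bit index (a sketch assuming
 * two's complement arithmetic right shifts; the names are made up): the bit
 * offset is signed and may reach far outside the addressed unit, so e.g.
 * for the 16-bit case:
 *
 *     static void sketchBtEa16(uint64_t GCPtrEff, int16_t i16BitOfs,
 *                              uint64_t *pGCPtrWord, unsigned *piBitNo)
 *     {
 *         *pGCPtrWord = GCPtrEff + (int64_t)(i16BitOfs >> 4) * 2; // word containing the bit
 *         *piBitNo    = (uint16_t)i16BitOfs & 0x0f;               // bit within that word
 *     }
 *
 * which is what the SAR(4) / SHL(1) / ADD_LOCAL_S16_TO_EFF_ADDR sequence
 * computes; 'bt word [mem], -1' therefore tests bit 15 of the word just
 * below the effective address (hence the negative bit offset todos above).
 */
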
9298/**
9299 * @opcode 0xa3
9300 * @oppfx n/a
9301 * @opflclass bitmap
9302 */
9303FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9304{
9305 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9306 IEMOP_HLP_MIN_386();
9307 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9308}
9309
9310
9311/**
9312 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9313 */
9314#define IEMOP_BODY_SHLD_SHRD_Ib(a_pImplExpr) \
9315 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9316 \
9317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9318 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9319 \
9320 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9321 { \
9322 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9323 \
9324 switch (pVCpu->iem.s.enmEffOpSize) \
9325 { \
9326 case IEMMODE_16BIT: \
9327 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9329 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9330 IEM_MC_ARG(uint16_t, u16Src, 1); \
9331 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9332 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9333 \
9334 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9335 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9336 IEM_MC_REF_EFLAGS(pEFlags); \
9337 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9338 \
9339 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9340 IEM_MC_END(); \
9341 break; \
9342 \
9343 case IEMMODE_32BIT: \
9344 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9346 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9347 IEM_MC_ARG(uint32_t, u32Src, 1); \
9348 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9349 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9350 \
9351 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9352 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9353 IEM_MC_REF_EFLAGS(pEFlags); \
9354 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9355 \
9356 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9357 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9358 IEM_MC_END(); \
9359 break; \
9360 \
9361 case IEMMODE_64BIT: \
9362 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9364 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9365 IEM_MC_ARG(uint64_t, u64Src, 1); \
9366 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9367 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9368 \
9369 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9370 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9371 IEM_MC_REF_EFLAGS(pEFlags); \
9372 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9373 \
9374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9375 IEM_MC_END(); \
9376 break; \
9377 \
9378 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9379 } \
9380 } \
9381 else \
9382 { \
9383 switch (pVCpu->iem.s.enmEffOpSize) \
9384 { \
9385 case IEMMODE_16BIT: \
9386 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9389 \
9390 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9392 \
9393 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9394 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9395 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9396 \
9397 IEM_MC_ARG(uint16_t, u16Src, 1); \
9398 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9399 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9400 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9401 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9402 \
9403 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9404 IEM_MC_COMMIT_EFLAGS(EFlags); \
9405 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9406 IEM_MC_END(); \
9407 break; \
9408 \
9409 case IEMMODE_32BIT: \
9410 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9413 \
9414 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9416 \
9417 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9418 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9419 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9420 \
9421 IEM_MC_ARG(uint32_t, u32Src, 1); \
9422 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9423 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9424 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9425 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9426 \
9427 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9428 IEM_MC_COMMIT_EFLAGS(EFlags); \
9429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9430 IEM_MC_END(); \
9431 break; \
9432 \
9433 case IEMMODE_64BIT: \
9434 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9437 \
9438 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9440 \
9441 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9442 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9443 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9444 \
9445 IEM_MC_ARG(uint64_t, u64Src, 1); \
9446 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9447 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9449 \
9450 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9451 \
9452 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9453 IEM_MC_COMMIT_EFLAGS(EFlags); \
9454 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9455 IEM_MC_END(); \
9456 break; \
9457 \
9458 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9459 } \
9460 } (void)0
9461
9462
9463/**
9464 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9465 */
9466#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9467 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9468 \
9469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9470 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9471 \
9472 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9473 { \
9474 switch (pVCpu->iem.s.enmEffOpSize) \
9475 { \
9476 case IEMMODE_16BIT: \
9477 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9479 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9480 IEM_MC_ARG(uint16_t, u16Src, 1); \
9481 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9482 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9483 \
9484 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9485 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9486 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9487 IEM_MC_REF_EFLAGS(pEFlags); \
9488 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9489 \
9490 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9491 IEM_MC_END(); \
9492 break; \
9493 \
9494 case IEMMODE_32BIT: \
9495 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9497 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9498 IEM_MC_ARG(uint32_t, u32Src, 1); \
9499 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9500 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9501 \
9502 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9503 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9504 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9505 IEM_MC_REF_EFLAGS(pEFlags); \
9506 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9507 \
9508 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9509 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9510 IEM_MC_END(); \
9511 break; \
9512 \
9513 case IEMMODE_64BIT: \
9514 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9516 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9517 IEM_MC_ARG(uint64_t, u64Src, 1); \
9518 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9519 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9520 \
9521 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9522 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9523 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9524 IEM_MC_REF_EFLAGS(pEFlags); \
9525 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9526 \
9527 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9528 IEM_MC_END(); \
9529 break; \
9530 \
9531 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9532 } \
9533 } \
9534 else \
9535 { \
9536 switch (pVCpu->iem.s.enmEffOpSize) \
9537 { \
9538 case IEMMODE_16BIT: \
9539 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9540 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9541 IEM_MC_ARG(uint16_t, u16Src, 1); \
9542 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9544 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9545 \
9546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9548 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9549 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9550 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9551 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9552 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9553 \
9554 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9555 IEM_MC_COMMIT_EFLAGS(EFlags); \
9556 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9557 IEM_MC_END(); \
9558 break; \
9559 \
9560 case IEMMODE_32BIT: \
9561 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9562 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9563 IEM_MC_ARG(uint32_t, u32Src, 1); \
9564 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9566 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9567 \
9568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9570 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9571 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9572 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9573 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9574 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9575 \
9576 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9577 IEM_MC_COMMIT_EFLAGS(EFlags); \
9578 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9579 IEM_MC_END(); \
9580 break; \
9581 \
9582 case IEMMODE_64BIT: \
9583 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9584 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9585 IEM_MC_ARG(uint64_t, u64Src, 1); \
9586 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9588 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9589 \
9590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9592 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9593 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9594 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9595 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9596 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9597 \
9598 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9599 IEM_MC_COMMIT_EFLAGS(EFlags); \
9600 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9601 IEM_MC_END(); \
9602 break; \
9603 \
9604 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9605 } \
9606 } (void)0
9607
9608
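/*
 * Rough operational model of the double precision shifts driven by the two
 * macro bodies above (a sketch; the real pImpl workers also compute EFLAGS
 * and the CPU-specific results for counts >= operand width, which is why
 * they are selected via IEMTARGETCPU_EFL_BEHAVIOR_SELECT):
 *
 *     static uint16_t sketchShld16(uint16_t uDst, uint16_t uSrc, uint8_t cShift)
 *     {
 *         cShift &= 31;                   // the CPU masks the count to 5 bits here
 *         if (cShift == 0)  return uDst;  // no-op, flags unchanged
 *         if (cShift >= 16) return 0;     // undefined for 16-bit operands; sketch punts
 *         return (uint16_t)((uDst << cShift) | (uSrc >> (16 - cShift)));
 *     }
 *
 * shrd mirrors this with the shift directions swapped:
 * (uDst >> cShift) | (uSrc << (16 - cShift)).
 */
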
9609/**
9610 * @opcode 0xa4
9611 * @opflclass shift_count
9612 */
9613FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9614{
9615 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9616 IEMOP_HLP_MIN_386();
9617 IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9618}
9619
9620
9621/**
9622 * @opcode 0xa5
9623 * @opflclass shift_count
9624 */
9625FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9626{
9627 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9628 IEMOP_HLP_MIN_386();
9629 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9630}
9631
9632
9633/** Opcode 0x0f 0xa8. */
9634FNIEMOP_DEF(iemOp_push_gs)
9635{
9636 IEMOP_MNEMONIC(push_gs, "push gs");
9637 IEMOP_HLP_MIN_386();
9638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9639 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9640}
9641
9642
9643/** Opcode 0x0f 0xa9. */
9644FNIEMOP_DEF(iemOp_pop_gs)
9645{
9646 IEMOP_MNEMONIC(pop_gs, "pop gs");
9647 IEMOP_HLP_MIN_386();
9648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9649 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9650 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9651 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9652 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9653 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9654 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9655 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9656 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9657}
9658
9659
9660/** Opcode 0x0f 0xaa. */
9661FNIEMOP_DEF(iemOp_rsm)
9662{
9663 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9664 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9666 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9667 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9668 iemCImpl_rsm);
9669}
9670
9671
9672
9673/**
9674 * @opcode 0xab
9675 * @oppfx n/a
9676 * @opflclass bitmap
9677 */
9678FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9679{
9680 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9681 IEMOP_HLP_MIN_386();
9682 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9683 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9684}
9685
9686
9687/**
9688 * @opcode 0xac
9689 * @opflclass shift_count
9690 */
9691FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9692{
9693 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9694 IEMOP_HLP_MIN_386();
9695 IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9696}
9697
9698
9699/**
9700 * @opcode 0xad
9701 * @opflclass shift_count
9702 */
9703FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9704{
9705 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9706 IEMOP_HLP_MIN_386();
9707 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9708}
9709
9710
9711/** Opcode 0x0f 0xae mem/0. */
9712FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9713{
9714 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9715 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9716 IEMOP_RAISE_INVALID_OPCODE_RET();
9717
9718 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9719 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9722 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9723 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9724 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9725 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9726 IEM_MC_END();
9727}
9728
9729
9730/** Opcode 0x0f 0xae mem/1. */
9731FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9732{
9733 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9734 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9735 IEMOP_RAISE_INVALID_OPCODE_RET();
9736
9737 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9738 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9741 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9742 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9743 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9744 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9745 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9746 IEM_MC_END();
9747}
9748
9749
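/*
 * Worth remembering for both fxsave and fxrstor above: the 512 byte image
 * must be 16 byte aligned or the instruction raises #GP.  The check lives
 * in the deferred C implementations; the gist is simply (illustrative):
 *
 *     if (GCPtrEff & 15)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */
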
9750/**
9751 * @opmaps grp15
9752 * @opcode !11/2
9753 * @oppfx none
9754 * @opcpuid sse
9755 * @opgroup og_sse_mxcsrsm
9756 * @opxcpttype 5
9757 * @optest op1=0 -> mxcsr=0
9758 * @optest op1=0x2083 -> mxcsr=0x2083
9759 * @optest op1=0xfffffffe -> value.xcpt=0xd
9760 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9761 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9762 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9763 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9764 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9765 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9766 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9767 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9768 */
9769FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9770{
9771 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9772 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9773 IEMOP_RAISE_INVALID_OPCODE_RET();
9774
9775 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9776 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9780 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9781 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9782 IEM_MC_END();
9783}
9784
9785
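/*
 * The 'op1=0xfffffffe -> value.xcpt=0xd' test above follows from the MXCSR
 * reserved bit rule: loading any bit that is clear in the CPU's MXCSR_MASK
 * raises #GP(0).  The gist, as a sketch (the fMxCsrMask value is an
 * assumption; the real check is in iemCImpl_ldmxcsr):
 *
 *     uint32_t const fMxCsrMask = 0xffff;  // typical mask without MISALIGNSSE
 *     if (uNewMxCsr & ~fMxCsrMask)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */
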
9786/**
9787 * @opmaps grp15
9788 * @opcode !11/3
9789 * @oppfx none
9790 * @opcpuid sse
9791 * @opgroup og_sse_mxcsrsm
9792 * @opxcpttype 5
9793 * @optest mxcsr=0 -> op1=0
9794 * @optest mxcsr=0x2083 -> op1=0x2083
9795 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9796 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9797 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9798 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9799 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9800 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9801 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9802 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9803 */
9804FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9805{
9806 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9807 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9808 IEMOP_RAISE_INVALID_OPCODE_RET();
9809
9810 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9814 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9815 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9816 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9817 IEM_MC_END();
9818}
9819
9820
9821/**
9822 * @opmaps grp15
9823 * @opcode !11/4
9824 * @oppfx none
9825 * @opcpuid xsave
9826 * @opgroup og_system
9827 * @opxcpttype none
9828 */
9829FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9830{
9831 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9832 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9833 IEMOP_RAISE_INVALID_OPCODE_RET();
9834
9835 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9836 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9839 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9840 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9841 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9842 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9843 IEM_MC_END();
9844}
9845
9846
9847/**
9848 * @opmaps grp15
9849 * @opcode !11/5
9850 * @oppfx none
9851 * @opcpuid xsave
9852 * @opgroup og_system
9853 * @opxcpttype none
9854 */
9855FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9856{
9857 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9858 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9859 IEMOP_RAISE_INVALID_OPCODE_RET();
9860
9861 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9862 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9865 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9866 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9867 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9868 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9869 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9870 IEM_MC_END();
9871}
9872
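/*
 * Both xsave and xrstor above operate on the state components selected by
 * the requested-feature bitmap, which the instruction forms from the
 * implicit EDX:EAX register pair ANDed with XCR0 (sketch, variable names
 * made up):
 *
 *     uint64_t const fReqComponents = (((uint64_t)uEdx << 32) | uEax) & uXcr0;
 */
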
9873/** Opcode 0x0f 0xae mem/6. */
9874FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9875
9876/**
9877 * @opmaps grp15
9878 * @opcode !11/7
9879 * @oppfx none
9880 * @opcpuid clfsh
9881 * @opgroup og_cachectl
9882 * @optest op1=1 ->
9883 */
9884FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9885{
9886 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9887 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9888 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9889
9890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9891 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9894 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9895 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9896 IEM_MC_END();
9897}
9898
9899/**
9900 * @opmaps grp15
9901 * @opcode !11/7
9902 * @oppfx 0x66
9903 * @opcpuid clflushopt
9904 * @opgroup og_cachectl
9905 * @optest op1=1 ->
9906 */
9907FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9908{
9909 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9911 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9912
9913 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9914 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9917 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9918 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9919 IEM_MC_END();
9920}
9921
9922
9923/** Opcode 0x0f 0xae 11b/5. */
9924FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9925{
9926 RT_NOREF_PV(bRm);
9927 IEMOP_MNEMONIC(lfence, "lfence");
9928 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9930#ifdef RT_ARCH_ARM64
9931 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9932#else
9933 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9934 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9935 else
9936 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9937#endif
9938 IEM_MC_ADVANCE_RIP_AND_FINISH();
9939 IEM_MC_END();
9940}
9941
9942
9943/** Opcode 0x0f 0xae 11b/6. */
9944FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9945{
9946 RT_NOREF_PV(bRm);
9947 IEMOP_MNEMONIC(mfence, "mfence");
9948 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9950#ifdef RT_ARCH_ARM64
9951 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9952#else
9953 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9954 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9955 else
9956 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9957#endif
9958 IEM_MC_ADVANCE_RIP_AND_FINISH();
9959 IEM_MC_END();
9960}
9961
9962
9963/** Opcode 0x0f 0xae 11b/7. */
9964FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9965{
9966 RT_NOREF_PV(bRm);
9967 IEMOP_MNEMONIC(sfence, "sfence");
9968 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9970#ifdef RT_ARCH_ARM64
9971 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9972#else
9973 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9974 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9975 else
9976 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9977#endif
9978 IEM_MC_ADVANCE_RIP_AND_FINISH();
9979 IEM_MC_END();
9980}
9981
9982
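/*
 * On the RT_ARCH_ARM64 / fSse2 selection in the three fence workers above:
 * ARM64 hosts always have suitable barrier instructions, while x86 hosts
 * without SSE2 fall back on iemAImpl_alt_mem_fence, i.e. some other fully
 * serializing memory operation.  Conceptually something like this (a
 * sketch, not the actual implementation):
 *
 *     static void sketchAltMemFence(void)
 *     {
 *         uint32_t volatile u32 = 0;
 *         ASMAtomicXchgU32(&u32, 1);  // any LOCKed read-modify-write fences on x86
 *     }
 */
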
9983/** Opcode 0xf3 0x0f 0xae 11b/0. */
9984FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9985{
9986 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9987 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9988 {
9989 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9991 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9992 IEM_MC_LOCAL(uint64_t, u64Dst);
9993 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9994 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9995 IEM_MC_ADVANCE_RIP_AND_FINISH();
9996 IEM_MC_END();
9997 }
9998 else
9999 {
10000 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10002 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10003 IEM_MC_LOCAL(uint32_t, u32Dst);
10004 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10005 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10006 IEM_MC_ADVANCE_RIP_AND_FINISH();
10007 IEM_MC_END();
10008 }
10009}
10010
10011
10012/** Opcode 0xf3 0x0f 0xae 11b/1. */
10013FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10014{
10015 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10016 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10017 {
10018 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10020 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10021 IEM_MC_LOCAL(uint64_t, u64Dst);
10022 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10023 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10024 IEM_MC_ADVANCE_RIP_AND_FINISH();
10025 IEM_MC_END();
10026 }
10027 else
10028 {
10029 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10031 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10032 IEM_MC_LOCAL(uint32_t, u32Dst);
10033 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10034 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10035 IEM_MC_ADVANCE_RIP_AND_FINISH();
10036 IEM_MC_END();
10037 }
10038}
10039
10040
10041/** Opcode 0xf3 0x0f 0xae 11b/2. */
10042FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10043{
10044 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10045 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10046 {
10047 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10049 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10050 IEM_MC_LOCAL(uint64_t, u64Dst);
10051 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10052 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10053 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10054 IEM_MC_ADVANCE_RIP_AND_FINISH();
10055 IEM_MC_END();
10056 }
10057 else
10058 {
10059 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10061 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10062 IEM_MC_LOCAL(uint32_t, u32Dst);
10063 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10064 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10065 IEM_MC_ADVANCE_RIP_AND_FINISH();
10066 IEM_MC_END();
10067 }
10068}
10069
10070
10071/** Opcode 0xf3 0x0f 0xae 11b/3. */
10072FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10073{
10074 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10075 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10076 {
10077 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10079 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10080 IEM_MC_LOCAL(uint64_t, u64Dst);
10081 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10082 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10083 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10084 IEM_MC_ADVANCE_RIP_AND_FINISH();
10085 IEM_MC_END();
10086 }
10087 else
10088 {
10089 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10091 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10092 IEM_MC_LOCAL(uint32_t, u32Dst);
10093 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10094 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10095 IEM_MC_ADVANCE_RIP_AND_FINISH();
10096 IEM_MC_END();
10097 }
10098}
10099
10100
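/*
 * The IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 checks in wrfsbase and
 * wrgsbase above enforce that the new base is a canonical address.  A
 * self-contained sketch of the predicate (assuming 48-bit linear addresses
 * and the usual two's complement conversion semantics):
 *
 *     static bool sketchIsCanonical(uint64_t uAddr)
 *     {
 *         return (uint64_t)(((int64_t)(uAddr << 16)) >> 16) == uAddr;
 *     }
 */
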
10101/**
10102 * Group 15 jump table for register variant.
10103 */
10104IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10105{ /* pfx: none, 066h, 0f3h, 0f2h */
10106 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10107 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10108 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10109 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10110 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10111 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10112 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10113 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10114};
10115AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10116
10117
10118/**
10119 * Group 15 jump table for memory variant.
10120 */
10121IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10122{ /* pfx: none, 066h, 0f3h, 0f2h */
10123 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10124 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10125 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10126 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10127 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10128 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10130 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10131};
10132AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10133
10134
10135/** Opcode 0x0f 0xae. */
10136FNIEMOP_DEF(iemOp_Grp15)
10137{
10138 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
10139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10140 if (IEM_IS_MODRM_REG_MODE(bRm))
10141 /* register, register */
10142 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10143 + pVCpu->iem.s.idxPrefix], bRm);
10144 /* memory, register */
10145 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10146 + pVCpu->iem.s.idxPrefix], bRm);
10147}
10148
10149
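/*
 * The two jump tables above are indexed by reg * 4 + idxPrefix, where
 * idxPrefix encodes the mandatory prefix as listed in the table headers:
 * 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2.  Worked example (values made up):
 *
 *     uint8_t const  bRm  = 0xe8;                      // mod=11b, reg=5, rm=0
 *     unsigned const iIdx = ((bRm >> 3) & 7) * 4 + 0;  // reg=5, no prefix -> 20
 *     // g_apfnGroup15RegReg[20] == iemOp_Grp15_lfence
 */
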
10150/**
10151 * @opcode 0xaf
10152 * @opflclass multiply
10153 */
10154FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10155{
10156 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10157 IEMOP_HLP_MIN_386();
10158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10159 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10161 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10162}
10163
10164
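/*
 * For the two-operand imul below only CF and OF are defined; they signal
 * that the full signed product no longer fits the destination width.  For
 * 32-bit operands the worker in essence does (sketch):
 *
 *     int64_t const iFull = (int64_t)(int32_t)uDst * (int32_t)uSrc;
 *     bool    const fOvfl = iFull != (int64_t)(int32_t)iFull;  // -> CF = OF = fOvfl
 */
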
10165/**
10166 * @opcode 0xb0
10167 * @opflclass arithmetic
10168 */
10169FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10170{
10171 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10172 IEMOP_HLP_MIN_486();
10173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10174
10175 if (IEM_IS_MODRM_REG_MODE(bRm))
10176 {
10177 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10179 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10180 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10181 IEM_MC_ARG(uint8_t, u8Src, 2);
10182 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10183
10184 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10185 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10186 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10187 IEM_MC_REF_EFLAGS(pEFlags);
10188 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10189
10190 IEM_MC_ADVANCE_RIP_AND_FINISH();
10191 IEM_MC_END();
10192 }
10193 else
10194 {
10195#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10196 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10199 IEMOP_HLP_DONE_DECODING(); \
10200 \
10201 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10202 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10203 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10204 \
10205 IEM_MC_ARG(uint8_t, u8Src, 2); \
10206 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10207 \
10208 IEM_MC_LOCAL(uint8_t, u8Al); \
10209 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10210 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10211 \
10212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10213 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10214 \
10215 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10216 IEM_MC_COMMIT_EFLAGS(EFlags); \
10217 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10218 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10219 IEM_MC_END()
10220
10221 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10222 {
10223 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8, RW);
10224 }
10225 else
10226 {
10227 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked, ATOMIC);
10228 }
10229 }
10230}
10231
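/*
 * Operational model of CMPXCHG as implemented by the workers used above and
 * in the Ev,Gv neighbour below (a sketch; the real helpers also update the
 * arithmetic flags as a CMP would):
 *
 *     static void sketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc,
 *                                 uint32_t *pfEFlags)
 *     {
 *         if (*puDst == *puAl)
 *         {
 *             *pfEFlags |= X86_EFL_ZF;   // equal: destination takes the source
 *             *puDst     = uSrc;
 *         }
 *         else
 *         {
 *             *pfEFlags &= ~X86_EFL_ZF;  // not equal: accumulator takes the old value
 *             *puAl      = *puDst;
 *         }
 *     }
 *
 * This is why the memory variant can store AL back unconditionally: the
 * worker only actually changes it on mismatch.
 */
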
10232/**
10233 * @opcode 0xb1
10234 * @opflclass arithmetic
10235 */
10236FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10237{
10238 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10239 IEMOP_HLP_MIN_486();
10240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10241
10242 if (IEM_IS_MODRM_REG_MODE(bRm))
10243 {
10244 switch (pVCpu->iem.s.enmEffOpSize)
10245 {
10246 case IEMMODE_16BIT:
10247 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10249 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10250 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10251 IEM_MC_ARG(uint16_t, u16Src, 2);
10252 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10253
10254 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10255 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10256 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10257 IEM_MC_REF_EFLAGS(pEFlags);
10258 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10259
10260 IEM_MC_ADVANCE_RIP_AND_FINISH();
10261 IEM_MC_END();
10262 break;
10263
10264 case IEMMODE_32BIT:
10265 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10267 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10268 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10269 IEM_MC_ARG(uint32_t, u32Src, 2);
10270 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10271
10272 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10273 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10274 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10275 IEM_MC_REF_EFLAGS(pEFlags);
10276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10277
10278 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10279 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10280 } IEM_MC_ELSE() {
10281 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10282 } IEM_MC_ENDIF();
10283
10284 IEM_MC_ADVANCE_RIP_AND_FINISH();
10285 IEM_MC_END();
10286 break;
10287
10288 case IEMMODE_64BIT:
10289 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10291 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10292 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10293 IEM_MC_ARG(uint64_t, u64Src, 2);
10294 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10295
10296 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10297 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10298 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10299 IEM_MC_REF_EFLAGS(pEFlags);
10300 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10301
10302 IEM_MC_ADVANCE_RIP_AND_FINISH();
10303 IEM_MC_END();
10304 break;
10305
10306 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10307 }
10308 }
10309 else
10310 {
10311#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
10312 do { \
10313 switch (pVCpu->iem.s.enmEffOpSize) \
10314 { \
10315 case IEMMODE_16BIT: \
10316 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10317 \
10318 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10321 IEMOP_HLP_DONE_DECODING(); \
10322 \
10323 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10324 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10325 \
10326 IEM_MC_ARG(uint16_t, u16Src, 2); \
10327 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10328 \
10329 IEM_MC_LOCAL(uint16_t, u16Ax); \
10330 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10331 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10332 \
10333 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10334 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10335 \
10336 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10337 IEM_MC_COMMIT_EFLAGS(EFlags); \
10338 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10339 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10340 IEM_MC_END(); \
10341 break; \
10342 \
10343 case IEMMODE_32BIT: \
10344 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10347 IEMOP_HLP_DONE_DECODING(); \
10348 \
10349 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10350 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10351 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10352 \
10353 IEM_MC_ARG(uint32_t, u32Src, 2); \
10354 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10355 \
10356 IEM_MC_LOCAL(uint32_t, u32Eax); \
10357 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10358 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10359 \
10360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10361 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10362 \
10363 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10364 IEM_MC_COMMIT_EFLAGS(EFlags); \
10365 \
10366 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10367 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10368 } IEM_MC_ENDIF(); \
10369 \
10370 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10371 IEM_MC_END(); \
10372 break; \
10373 \
10374 case IEMMODE_64BIT: \
10375 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10378 IEMOP_HLP_DONE_DECODING(); \
10379 \
10380 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10381 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10382 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10383 \
10384 IEM_MC_ARG(uint64_t, u64Src, 2); \
10385 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10386 \
10387 IEM_MC_LOCAL(uint64_t, u64Rax); \
10388 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10389 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10390 \
10391 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10392 \
10393 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10394 \
10395 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10396 IEM_MC_COMMIT_EFLAGS(EFlags); \
10397 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10398 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10399 IEM_MC_END(); \
10400 break; \
10401 \
10402 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10403 } \
10404 } while (0)
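
/* Note: CMPXCHG compares the accumulator with the destination operand; on a
   match ZF is set and the source is stored, otherwise ZF is cleared and the
   destination value is loaded into the accumulator.  The 32-bit memory path
   above therefore only writes EAX back when ZF ends up clear, so a
   successful exchange leaves the upper half of RAX untouched here. */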
10405
10406 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10407 {
10408 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64, RW);
10409 }
10410 else
10411 {
10412 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked, ATOMIC);
10413 }
10414 }
10415}
10416
10417
10418/** Opcode 0x0f 0xb2. */
10419FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10420{
10421 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10422 IEMOP_HLP_MIN_386();
10423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10424 if (IEM_IS_MODRM_REG_MODE(bRm))
10425 IEMOP_RAISE_INVALID_OPCODE_RET();
10426 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10427}
10428
10429
10430/**
10431 * @opcode 0xb3
10432 * @oppfx n/a
10433 * @opflclass bitmap
10434 */
10435FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10436{
10437 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10438 IEMOP_HLP_MIN_386();
10439 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10440 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10441}
10442
10443
10444/** Opcode 0x0f 0xb4. */
10445FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10446{
10447 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10448 IEMOP_HLP_MIN_386();
10449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10450 if (IEM_IS_MODRM_REG_MODE(bRm))
10451 IEMOP_RAISE_INVALID_OPCODE_RET();
10452 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10453}
10454
10455
10456/** Opcode 0x0f 0xb5. */
10457FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10458{
10459 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10460 IEMOP_HLP_MIN_386();
10461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10462 if (IEM_IS_MODRM_REG_MODE(bRm))
10463 IEMOP_RAISE_INVALID_OPCODE_RET();
10464 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10465}
10466
10467
10468/** Opcode 0x0f 0xb6. */
10469FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10470{
10471 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10472 IEMOP_HLP_MIN_386();
10473
10474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10475
10476 /*
10477 * If rm is denoting a register, no more instruction bytes.
10478 */
10479 if (IEM_IS_MODRM_REG_MODE(bRm))
10480 {
10481 switch (pVCpu->iem.s.enmEffOpSize)
10482 {
10483 case IEMMODE_16BIT:
10484 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10486 IEM_MC_LOCAL(uint16_t, u16Value);
10487 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10488 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10489 IEM_MC_ADVANCE_RIP_AND_FINISH();
10490 IEM_MC_END();
10491 break;
10492
10493 case IEMMODE_32BIT:
10494 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10496 IEM_MC_LOCAL(uint32_t, u32Value);
10497 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10498 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10499 IEM_MC_ADVANCE_RIP_AND_FINISH();
10500 IEM_MC_END();
10501 break;
10502
10503 case IEMMODE_64BIT:
10504 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10506 IEM_MC_LOCAL(uint64_t, u64Value);
10507 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10508 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10509 IEM_MC_ADVANCE_RIP_AND_FINISH();
10510 IEM_MC_END();
10511 break;
10512
10513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10514 }
10515 }
10516 else
10517 {
10518 /*
10519 * We're loading a register from memory.
10520 */
10521 switch (pVCpu->iem.s.enmEffOpSize)
10522 {
10523 case IEMMODE_16BIT:
10524 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10525 IEM_MC_LOCAL(uint16_t, u16Value);
10526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10529 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10530 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10531 IEM_MC_ADVANCE_RIP_AND_FINISH();
10532 IEM_MC_END();
10533 break;
10534
10535 case IEMMODE_32BIT:
10536 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10537 IEM_MC_LOCAL(uint32_t, u32Value);
10538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10541 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10542 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10543 IEM_MC_ADVANCE_RIP_AND_FINISH();
10544 IEM_MC_END();
10545 break;
10546
10547 case IEMMODE_64BIT:
10548 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10549 IEM_MC_LOCAL(uint64_t, u64Value);
10550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10553 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10554 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10555 IEM_MC_ADVANCE_RIP_AND_FINISH();
10556 IEM_MC_END();
10557 break;
10558
10559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10560 }
10561 }
10562}
10563
10564
10565/** Opcode 0x0f 0xb7. */
10566FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10567{
10568 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10569 IEMOP_HLP_MIN_386();
10570
10571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10572
10573 /** @todo Not entirely sure how the operand size prefix is handled here,
10574 * assuming that it will be ignored. Would be nice to have a few
10575 * tests for this. */
10576
10577 /** @todo There should be no difference in the behaviour whether REX.W is
10578 * present or not... */
10579
10580 /*
10581 * If rm is denoting a register, no more instruction bytes.
10582 */
10583 if (IEM_IS_MODRM_REG_MODE(bRm))
10584 {
10585 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10586 {
10587 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10589 IEM_MC_LOCAL(uint32_t, u32Value);
10590 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10591 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10592 IEM_MC_ADVANCE_RIP_AND_FINISH();
10593 IEM_MC_END();
10594 }
10595 else
10596 {
10597 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10599 IEM_MC_LOCAL(uint64_t, u64Value);
10600 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10601 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10602 IEM_MC_ADVANCE_RIP_AND_FINISH();
10603 IEM_MC_END();
10604 }
10605 }
10606 else
10607 {
10608 /*
10609 * We're loading a register from memory.
10610 */
10611 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10612 {
10613 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10614 IEM_MC_LOCAL(uint32_t, u32Value);
10615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10618 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10619 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10620 IEM_MC_ADVANCE_RIP_AND_FINISH();
10621 IEM_MC_END();
10622 }
10623 else
10624 {
10625 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10626 IEM_MC_LOCAL(uint64_t, u64Value);
10627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10630 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10631 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10632 IEM_MC_ADVANCE_RIP_AND_FINISH();
10633 IEM_MC_END();
10634 }
10635 }
10636}
10637
10638
10639/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10640FNIEMOP_UD_STUB(iemOp_jmpe);
10641
10642
10643/**
10644 * @opcode 0xb8
10645 * @oppfx 0xf3
10646 * @opflmodify cf,pf,af,zf,sf,of
10647 * @opflclear cf,pf,af,sf,of
10648 */
10649FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10650{
10651 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10652 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10653 return iemOp_InvalidNeedRM(pVCpu);
10654#ifndef TST_IEM_CHECK_MC
10655# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10656 static const IEMOPBINSIZES s_Native =
10657 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10658# endif
10659 static const IEMOPBINSIZES s_Fallback =
10660 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10661#endif
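 /* Use the assembly helpers when the host CPU has POPCNT, the C fallback otherwise. */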
10662 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10664 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10665}
10666
10667
10668/**
10669 * @opcode 0xb9
10670 * @opinvalid intel-modrm
10671 * @optest ->
10672 */
10673FNIEMOP_DEF(iemOp_Grp10)
10674{
10675 /*
10676 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10677 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10678 */
10679 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10680 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10681 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10682}
10683
10684
10685/**
10686 * Body for group 8 bit instruction.
10687 */
10688#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10689 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10690 \
10691 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10692 { \
10693 /* register destination. */ \
10694 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10695 \
10696 switch (pVCpu->iem.s.enmEffOpSize) \
10697 { \
10698 case IEMMODE_16BIT: \
10699 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10701 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10702 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10703 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10704 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10705 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10706 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10707 \
10708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10709 IEM_MC_END(); \
10710 break; \
10711 \
10712 case IEMMODE_32BIT: \
10713 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10715 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10716 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10717 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10718 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10719 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10720 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10721 \
10722 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10723 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10724 IEM_MC_END(); \
10725 break; \
10726 \
10727 case IEMMODE_64BIT: \
10728 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10730 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10731 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10732 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10733 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10734 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10735 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10736 \
10737 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10738 IEM_MC_END(); \
10739 break; \
10740 \
10741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10742 } \
10743 } \
10744 else \
10745 { \
10746 /* memory destination. */ \
10747 /** @todo test negative bit offsets! */ \
10748 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10749 { \
10750 switch (pVCpu->iem.s.enmEffOpSize) \
10751 { \
10752 case IEMMODE_16BIT: \
10753 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10756 \
10757 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10758 IEMOP_HLP_DONE_DECODING(); \
10759 \
10760 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10761 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10762 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10763 \
10764 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10765 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10766 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10767 \
10768 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10769 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10770 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10771 IEM_MC_END(); \
10772 break; \
10773 \
10774 case IEMMODE_32BIT: \
10775 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10778 \
10779 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10780 IEMOP_HLP_DONE_DECODING(); \
10781 \
10782 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10783 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10784 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10785 \
10786 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10787 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10788 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10789 \
10790 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10791 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10792 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10793 IEM_MC_END(); \
10794 break; \
10795 \
10796 case IEMMODE_64BIT: \
10797 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10800 \
10801 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10802 IEMOP_HLP_DONE_DECODING(); \
10803 \
10804 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10805 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10806 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10807 \
10808 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10809 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10810 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10811 \
10812 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10813 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10814 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10815 IEM_MC_END(); \
10816 break; \
10817 \
10818 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10819 } \
10820 } \
10821 else \
10822 { \
10823 (void)0
10824/* Separate macro to work around a parsing issue in IEMAllInstPython.py */
10825#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10826 switch (pVCpu->iem.s.enmEffOpSize) \
10827 { \
10828 case IEMMODE_16BIT: \
10829 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10832 \
10833 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10834 IEMOP_HLP_DONE_DECODING(); \
10835 \
10836 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10837 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10838 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10839 \
10840 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10841 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10842 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10843 \
10844 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10845 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10847 IEM_MC_END(); \
10848 break; \
10849 \
10850 case IEMMODE_32BIT: \
10851 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10854 \
10855 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10856 IEMOP_HLP_DONE_DECODING(); \
10857 \
10858 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10859 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10860 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10861 \
10862 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10863 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10864 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10865 \
10866 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10867 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10869 IEM_MC_END(); \
10870 break; \
10871 \
10872 case IEMMODE_64BIT: \
10873 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10876 \
10877 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10878 IEMOP_HLP_DONE_DECODING(); \
10879 \
10880 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10881 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10882 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10883 \
10884 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10885 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10886 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10887 \
10888 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10889 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10890 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10891 IEM_MC_END(); \
10892 break; \
10893 \
10894 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10895 } \
10896 } \
10897 } \
10898 (void)0
10899
10900/* Read-only version (bt) */
10901#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10902 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10903 \
10904 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10905 { \
10906 /* register destination. */ \
10907 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10908 \
10909 switch (pVCpu->iem.s.enmEffOpSize) \
10910 { \
10911 case IEMMODE_16BIT: \
10912 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10914 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10915 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10916 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10917 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10918 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10919 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10920 \
10921 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10922 IEM_MC_END(); \
10923 break; \
10924 \
10925 case IEMMODE_32BIT: \
10926 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10928 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10929 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10930 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10931 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10932 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10933 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10934 \
10935 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10936 IEM_MC_END(); \
10937 break; \
10938 \
10939 case IEMMODE_64BIT: \
10940 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10942 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10943 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10944 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10945 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10946 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10947 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10948 \
10949 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10950 IEM_MC_END(); \
10951 break; \
10952 \
10953 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10954 } \
10955 } \
10956 else \
10957 { \
10958 /* memory destination. */ \
10959 /** @todo test negative bit offsets! */ \
10960 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10961 { \
10962 switch (pVCpu->iem.s.enmEffOpSize) \
10963 { \
10964 case IEMMODE_16BIT: \
10965 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10968 \
10969 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10970 IEMOP_HLP_DONE_DECODING(); \
10971 \
10972 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10973 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10974 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10975 \
10976 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10977 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10978 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10979 \
10980 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10981 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10982 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10983 IEM_MC_END(); \
10984 break; \
10985 \
10986 case IEMMODE_32BIT: \
10987 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10990 \
10991 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10992 IEMOP_HLP_DONE_DECODING(); \
10993 \
10994 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10995 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10996 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10997 \
10998 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10999 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
11000 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11001 \
11002 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11003 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11004 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11005 IEM_MC_END(); \
11006 break; \
11007 \
11008 case IEMMODE_64BIT: \
11009 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11012 \
11013 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11014 IEMOP_HLP_DONE_DECODING(); \
11015 \
11016 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11017 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
11018 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11019 \
11020 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11021 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
11022 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11023 \
11024 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11025 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11026 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11027 IEM_MC_END(); \
11028 break; \
11029 \
11030 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11031 } \
11032 } \
11033 else \
11034 { \
11035 IEMOP_HLP_DONE_DECODING(); \
11036 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11037 } \
11038 } \
11039 (void)0
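
/* Note: For the grp8 immediate forms the bit offset is taken modulo the
   operand size, hence the bImm & 0x0f/0x1f/0x3f masking in the bodies above.
   Only the register-source forms (bt/bts/btr/btc Ev,Gv) can address bits
   outside the unit addressed by the ModR/M operand. */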
11040
11041
11042/**
11043 * @opmaps grp8
11044 * @opcode /4
11045 * @oppfx n/a
11046 * @opflclass bitmap
11047 */
11048FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11049{
11050 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11051 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11052}
11053
11054
11055/**
11056 * @opmaps grp8
11057 * @opcode /5
11058 * @oppfx n/a
11059 * @opflclass bitmap
11060 */
11061FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11062{
11063 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11064 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11065 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11066}
11067
11068
11069/**
11070 * @opmaps grp8
11071 * @opcode /6
11072 * @oppfx n/a
11073 * @opflclass bitmap
11074 */
11075FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11076{
11077 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11078 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11079 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11080}
11081
11082
11083/**
11084 * @opmaps grp8
11085 * @opcode /7
11086 * @oppfx n/a
11087 * @opflclass bitmap
11088 */
11089FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11090{
11091 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11092 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11093 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11094}
11095
11096
11097/** Opcode 0x0f 0xba. */
11098FNIEMOP_DEF(iemOp_Grp8)
11099{
11100 IEMOP_HLP_MIN_386();
11101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11102 switch (IEM_GET_MODRM_REG_8(bRm))
11103 {
11104 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11105 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11106 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11107 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11108
11109 case 0: case 1: case 2: case 3:
11110 /* Both AMD and Intel want full modr/m decoding and imm8. */
11111 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11112
11113 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11114 }
11115}
11116
11117
11118/**
11119 * @opcode 0xbb
11120 * @oppfx n/a
11121 * @opflclass bitmap
11122 */
11123FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11124{
11125 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11126 IEMOP_HLP_MIN_386();
11127 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11128 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11129}
11130
11131
11132/**
11133 * Body for BSF and BSR instructions.
11134 *
11135 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11136 * the destination register, which means that for 32-bit operations the high
11137 * bits must be left alone.
11138 *
11139 * @param pImpl Pointer to the instruction implementation (assembly).
11140 */
11141#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11143 \
11144 /* \
11145 * If rm is denoting a register, no more instruction bytes. \
11146 */ \
11147 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11148 { \
11149 switch (pVCpu->iem.s.enmEffOpSize) \
11150 { \
11151 case IEMMODE_16BIT: \
11152 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11154 \
11155 IEM_MC_ARG(uint16_t, u16Src, 2); \
11156 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11157 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11158 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11159 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11160 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11161 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11162 \
11163 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11164 IEM_MC_END(); \
11165 break; \
11166 \
11167 case IEMMODE_32BIT: \
11168 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11170 \
11171 IEM_MC_ARG(uint32_t, u32Src, 2); \
11172 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11173 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11174 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11175 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11176 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11177 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11178 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11179 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11180 } IEM_MC_ENDIF(); \
11181 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11182 IEM_MC_END(); \
11183 break; \
11184 \
11185 case IEMMODE_64BIT: \
11186 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11188 \
11189 IEM_MC_ARG(uint64_t, u64Src, 2); \
11190 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11191 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11192 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11193 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11194 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11195 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11196 \
11197 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11198 IEM_MC_END(); \
11199 break; \
11200 \
11201 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11202 } \
11203 } \
11204 else \
11205 { \
11206 /* \
11207 * We're accessing memory. \
11208 */ \
11209 switch (pVCpu->iem.s.enmEffOpSize) \
11210 { \
11211 case IEMMODE_16BIT: \
11212 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11216 \
11217 IEM_MC_ARG(uint16_t, u16Src, 2); \
11218 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11219 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11220 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11221 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11222 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11223 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11224 \
11225 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11226 IEM_MC_END(); \
11227 break; \
11228 \
11229 case IEMMODE_32BIT: \
11230 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11234 \
11235 IEM_MC_ARG(uint32_t, u32Src, 2); \
11236 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11237 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11238 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11239 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11240 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11241 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11242 \
11243 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11244 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11245 } IEM_MC_ENDIF(); \
11246 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11247 IEM_MC_END(); \
11248 break; \
11249 \
11250 case IEMMODE_64BIT: \
11251 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11255 \
11256 IEM_MC_ARG(uint64_t, u64Src, 2); \
11257 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11258 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11259 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11260 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11261 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11262 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11263 \
11264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11265 IEM_MC_END(); \
11266 break; \
11267 \
11268 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11269 } \
11270 } (void)0
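
/* Note: When the scan source is zero, ZF is set and the destination register
   is left untouched (Intel documents the destination as undefined in this
   case, AMD as unchanged).  This is why the 32-bit cases above only clear
   the high dword of the destination when ZF ends up clear, i.e. when the
   helper has actually written a result. */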
11271
11272
11273/**
11274 * @opcode 0xbc
11275 * @oppfx !0xf3
11276 * @opfltest cf,pf,af,sf,of
11277 * @opflmodify cf,pf,af,zf,sf,of
11278 * @opflundef cf,pf,af,sf,of
11279 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11280 * document them as inputs. Sigh.
11281 */
11282FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11283{
11284 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11285 IEMOP_HLP_MIN_386();
11286 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11287 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11288 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11289}
11290
11291
11292/**
11293 * @opcode 0xbc
11294 * @oppfx 0xf3
11295 * @opfltest pf,af,sf,of
11296 * @opflmodify cf,pf,af,zf,sf,of
11297 * @opflundef pf,af,sf,of
11298 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11299 * document them as inputs. Sigh.
11300 */
11301FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11302{
11303 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11304 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11305 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11306
11307#ifndef TST_IEM_CHECK_MC
11308 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11309 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11310 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11311 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11312 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11313 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11314 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11315 {
11316 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11317 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11318 };
11319#endif
11320 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11321 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11322 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11324 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11325}
11326
11327
11328/**
11329 * @opcode 0xbd
11330 * @oppfx !0xf3
11331 * @opfltest cf,pf,af,sf,of
11332 * @opflmodify cf,pf,af,zf,sf,of
11333 * @opflundef cf,pf,af,sf,of
11334 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11335 * document them as inputs. Sigh.
11336 */
11337FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11338{
11339 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11340 IEMOP_HLP_MIN_386();
11341 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11342 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11343 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11344}
11345
11346
11347/**
11348 * @opcode 0xbd
11349 * @oppfx 0xf3
11350 * @opfltest pf,af,sf,of
11351 * @opflmodify cf,pf,af,zf,sf,of
11352 * @opflundef pf,af,sf,of
11353 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11354 * document them as inputs. Sigh.
11355 */
11356FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11357{
11358 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11359 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11360 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11361
11362#ifndef TST_IEM_CHECK_MC
11363 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11364 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11365 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11366 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11367 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11368 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11369 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11370 {
11371 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11372 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11373 };
11374#endif
11375 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11376 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11377 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11379 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11380}
11381
11382
11383
11384/** Opcode 0x0f 0xbe. */
11385FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11386{
11387 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11388 IEMOP_HLP_MIN_386();
11389
11390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11391
11392 /*
11393 * If rm is denoting a register, no more instruction bytes.
11394 */
11395 if (IEM_IS_MODRM_REG_MODE(bRm))
11396 {
11397 switch (pVCpu->iem.s.enmEffOpSize)
11398 {
11399 case IEMMODE_16BIT:
11400 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11402 IEM_MC_LOCAL(uint16_t, u16Value);
11403 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11404 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11405 IEM_MC_ADVANCE_RIP_AND_FINISH();
11406 IEM_MC_END();
11407 break;
11408
11409 case IEMMODE_32BIT:
11410 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11412 IEM_MC_LOCAL(uint32_t, u32Value);
11413 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11414 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11415 IEM_MC_ADVANCE_RIP_AND_FINISH();
11416 IEM_MC_END();
11417 break;
11418
11419 case IEMMODE_64BIT:
11420 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11422 IEM_MC_LOCAL(uint64_t, u64Value);
11423 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11424 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11425 IEM_MC_ADVANCE_RIP_AND_FINISH();
11426 IEM_MC_END();
11427 break;
11428
11429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11430 }
11431 }
11432 else
11433 {
11434 /*
11435 * We're loading a register from memory.
11436 */
11437 switch (pVCpu->iem.s.enmEffOpSize)
11438 {
11439 case IEMMODE_16BIT:
11440 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11441 IEM_MC_LOCAL(uint16_t, u16Value);
11442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11445 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11446 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11447 IEM_MC_ADVANCE_RIP_AND_FINISH();
11448 IEM_MC_END();
11449 break;
11450
11451 case IEMMODE_32BIT:
11452 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11453 IEM_MC_LOCAL(uint32_t, u32Value);
11454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11457 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11458 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11459 IEM_MC_ADVANCE_RIP_AND_FINISH();
11460 IEM_MC_END();
11461 break;
11462
11463 case IEMMODE_64BIT:
11464 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11465 IEM_MC_LOCAL(uint64_t, u64Value);
11466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11469 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11470 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11471 IEM_MC_ADVANCE_RIP_AND_FINISH();
11472 IEM_MC_END();
11473 break;
11474
11475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11476 }
11477 }
11478}
11479
11480
11481/** Opcode 0x0f 0xbf. */
11482FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11483{
11484 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11485 IEMOP_HLP_MIN_386();
11486
11487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11488
11489 /** @todo Not entirely sure how the operand size prefix is handled here,
11490 * assuming that it will be ignored. Would be nice to have a few
11491 * tests for this. */
11492 /*
11493 * If rm is denoting a register, no more instruction bytes.
11494 */
11495 if (IEM_IS_MODRM_REG_MODE(bRm))
11496 {
11497 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11498 {
11499 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11501 IEM_MC_LOCAL(uint32_t, u32Value);
11502 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11503 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11504 IEM_MC_ADVANCE_RIP_AND_FINISH();
11505 IEM_MC_END();
11506 }
11507 else
11508 {
11509 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11511 IEM_MC_LOCAL(uint64_t, u64Value);
11512 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11513 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11514 IEM_MC_ADVANCE_RIP_AND_FINISH();
11515 IEM_MC_END();
11516 }
11517 }
11518 else
11519 {
11520 /*
11521 * We're loading a register from memory.
11522 */
11523 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11524 {
11525 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11526 IEM_MC_LOCAL(uint32_t, u32Value);
11527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11530 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11531 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11532 IEM_MC_ADVANCE_RIP_AND_FINISH();
11533 IEM_MC_END();
11534 }
11535 else
11536 {
11537 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11538 IEM_MC_LOCAL(uint64_t, u64Value);
11539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11542 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11543 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11544 IEM_MC_ADVANCE_RIP_AND_FINISH();
11545 IEM_MC_END();
11546 }
11547 }
11548}
11549
11550
11551/**
11552 * @opcode 0xc0
11553 * @opflclass arithmetic
11554 */
11555FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11556{
11557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11558 IEMOP_HLP_MIN_486();
11559 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11560
11561 /*
11562 * If rm is denoting a register, no more instruction bytes.
11563 */
11564 if (IEM_IS_MODRM_REG_MODE(bRm))
11565 {
11566 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11568 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11569 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11570 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11571
11572 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11573 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11574 IEM_MC_REF_EFLAGS(pEFlags);
11575 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11576
11577 IEM_MC_ADVANCE_RIP_AND_FINISH();
11578 IEM_MC_END();
11579 }
11580 else
11581 {
11582 /*
11583 * We're accessing memory.
11584 */
11585#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11586 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11589 IEMOP_HLP_DONE_DECODING(); \
11590 \
11591 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11592 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11593 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11594 \
11595 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11596 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11597 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11598 \
11599 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11600 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11601 \
11602 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11603 IEM_MC_COMMIT_EFLAGS(EFlags); \
11604 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11605 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11606 IEM_MC_END()
11607 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11608 {
11609 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8, RW);
11610 }
11611 else
11612 {
11613 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked, ATOMIC);
11614 }
11615 }
11616}
11617
11618
11619/**
11620 * @opcode 0xc1
11621 * @opflclass arithmetic
11622 */
11623FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11624{
11625 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11626 IEMOP_HLP_MIN_486();
11627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11628
11629 /*
11630 * If rm is denoting a register, no more instruction bytes.
11631 */
11632 if (IEM_IS_MODRM_REG_MODE(bRm))
11633 {
11634 switch (pVCpu->iem.s.enmEffOpSize)
11635 {
11636 case IEMMODE_16BIT:
11637 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11639 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11640 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11641 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11642
11643 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11644 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11645 IEM_MC_REF_EFLAGS(pEFlags);
11646 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11647
11648 IEM_MC_ADVANCE_RIP_AND_FINISH();
11649 IEM_MC_END();
11650 break;
11651
11652 case IEMMODE_32BIT:
11653 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11655 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11656 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11657 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11658
11659 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11660 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11661 IEM_MC_REF_EFLAGS(pEFlags);
11662 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11663
11664 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11665 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11666 IEM_MC_ADVANCE_RIP_AND_FINISH();
11667 IEM_MC_END();
11668 break;
11669
11670 case IEMMODE_64BIT:
11671 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11673 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11674 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11675 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11676
11677 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11678 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11679 IEM_MC_REF_EFLAGS(pEFlags);
11680 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11681
11682 IEM_MC_ADVANCE_RIP_AND_FINISH();
11683 IEM_MC_END();
11684 break;
11685
11686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11687 }
11688 }
11689 else
11690 {
11691 /*
11692 * We're accessing memory.
11693 */
11694#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11695 do { \
11696 switch (pVCpu->iem.s.enmEffOpSize) \
11697 { \
11698 case IEMMODE_16BIT: \
11699 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11702 IEMOP_HLP_DONE_DECODING(); \
11703 \
11704 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11705 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11706 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11707 \
11708 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11709 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11710 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11711 \
11712 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11713 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11714 \
11715 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11716 IEM_MC_COMMIT_EFLAGS(EFlags); \
11717 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11718 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11719 IEM_MC_END(); \
11720 break; \
11721 \
11722 case IEMMODE_32BIT: \
11723 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11726 IEMOP_HLP_DONE_DECODING(); \
11727 \
11728 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11729 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11730 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11731 \
11732 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11733 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11734 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11735 \
11736 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11737 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11738 \
11739 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11740 IEM_MC_COMMIT_EFLAGS(EFlags); \
11741 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11743 IEM_MC_END(); \
11744 break; \
11745 \
11746 case IEMMODE_64BIT: \
11747 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11750 IEMOP_HLP_DONE_DECODING(); \
11751 \
11752 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11753 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11754 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11755 \
11756 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11757 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11758 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11759 \
11760 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11761 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11762 \
11763 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11764 IEM_MC_COMMIT_EFLAGS(EFlags); \
11765 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11766 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11767 IEM_MC_END(); \
11768 break; \
11769 \
11770 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11771 } \
11772 } while (0)
11773
11774 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11775 {
11776 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64, RW);
11777 }
11778 else
11779 {
11780 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked, ATOMIC);
11781 }
11782 }
11783}
11784
11785
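/* The 0x0f 0xc2 compare family below (cmpps/cmppd/cmpss/cmpsd) takes an imm8
   selecting the predicate: 0=eq, 1=lt, 2=le, 3=unord, 4=neq, 5=nlt, 6=nle,
   7=ord.  Each element comparison produces an all-ones or all-zeroes mask. */
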
11786/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11787FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11788{
11789 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11790
11791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11792 if (IEM_IS_MODRM_REG_MODE(bRm))
11793 {
11794 /*
11795 * XMM, XMM.
11796 */
11797 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11798 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11800 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11801 IEM_MC_LOCAL(X86XMMREG, Dst);
11802 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11803 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11804 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11805 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11806 IEM_MC_PREPARE_SSE_USAGE();
11807 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11808 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11809 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11810
11811 IEM_MC_ADVANCE_RIP_AND_FINISH();
11812 IEM_MC_END();
11813 }
11814 else
11815 {
11816 /*
11817 * XMM, [mem128].
11818 */
11819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11820 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11821 IEM_MC_LOCAL(X86XMMREG, Dst);
11822 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11823 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11825
11826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11827 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11828 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11830 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11831 IEM_MC_PREPARE_SSE_USAGE();
11832
11833 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11835 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11836
11837 IEM_MC_ADVANCE_RIP_AND_FINISH();
11838 IEM_MC_END();
11839 }
11840}
11841
11842
11843/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11844FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11845{
11846 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11847
11848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11849 if (IEM_IS_MODRM_REG_MODE(bRm))
11850 {
11851 /*
11852 * XMM, XMM.
11853 */
11854 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11855 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11857 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11858 IEM_MC_LOCAL(X86XMMREG, Dst);
11859 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11860 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11861 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11862 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11863 IEM_MC_PREPARE_SSE_USAGE();
11864 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11865 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11866 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11867
11868 IEM_MC_ADVANCE_RIP_AND_FINISH();
11869 IEM_MC_END();
11870 }
11871 else
11872 {
11873 /*
11874 * XMM, [mem128].
11875 */
11876 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11877 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11878 IEM_MC_LOCAL(X86XMMREG, Dst);
11879 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11880 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11882
11883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11884 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11885 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11888 IEM_MC_PREPARE_SSE_USAGE();
11889
11890 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11891 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11892 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11893
11894 IEM_MC_ADVANCE_RIP_AND_FINISH();
11895 IEM_MC_END();
11896 }
11897}
11898
11899
11900/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11901FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11902{
11903 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11904
11905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11906 if (IEM_IS_MODRM_REG_MODE(bRm))
11907 {
11908 /*
11909 * XMM32, XMM32.
11910 */
11911 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11912 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11914 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11915 IEM_MC_LOCAL(X86XMMREG, Dst);
11916 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11917 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11918 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11920 IEM_MC_PREPARE_SSE_USAGE();
11921 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11922 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11923 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11924
11925 IEM_MC_ADVANCE_RIP_AND_FINISH();
11926 IEM_MC_END();
11927 }
11928 else
11929 {
11930 /*
11931 * XMM32, [mem32].
11932 */
11933 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11934 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11935 IEM_MC_LOCAL(X86XMMREG, Dst);
11936 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11937 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11939
11940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11941 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11942 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11944 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11945 IEM_MC_PREPARE_SSE_USAGE();
11946
11947 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11948 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11949 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11950 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11951
11952 IEM_MC_ADVANCE_RIP_AND_FINISH();
11953 IEM_MC_END();
11954 }
11955}
11956
11957
11958/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11959FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11960{
11961 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11962
11963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11964 if (IEM_IS_MODRM_REG_MODE(bRm))
11965 {
11966 /*
11967 * XMM64, XMM64.
11968 */
11969 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11970 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11972 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11973 IEM_MC_LOCAL(X86XMMREG, Dst);
11974 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11975 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11976 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11977 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11978 IEM_MC_PREPARE_SSE_USAGE();
11979 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11980 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11981 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11982
11983 IEM_MC_ADVANCE_RIP_AND_FINISH();
11984 IEM_MC_END();
11985 }
11986 else
11987 {
11988 /*
11989 * XMM64, [mem64].
11990 */
11991 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11992 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11993 IEM_MC_LOCAL(X86XMMREG, Dst);
11994 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11995 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11997
11998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11999 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12000 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12002 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12003 IEM_MC_PREPARE_SSE_USAGE();
12004
12005 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12006 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12007 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12008 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12009
12010 IEM_MC_ADVANCE_RIP_AND_FINISH();
12011 IEM_MC_END();
12012 }
12013}
12014
12015
12016/** Opcode 0x0f 0xc3. */
12017FNIEMOP_DEF(iemOp_movnti_My_Gy)
12018{
12019 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
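    /* Note: movnti is a non-temporal store hint. The caching hint has no
       architectural effect on the emulated state, so it is implemented as a
       plain general register store below. */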
12020
12021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12022
12023 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12024 if (IEM_IS_MODRM_MEM_MODE(bRm))
12025 {
12026 switch (pVCpu->iem.s.enmEffOpSize)
12027 {
12028 case IEMMODE_32BIT:
12029 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
12030 IEM_MC_LOCAL(uint32_t, u32Value);
12031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12032
12033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12035
12036 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12037 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12038 IEM_MC_ADVANCE_RIP_AND_FINISH();
12039 IEM_MC_END();
12040 break;
12041
12042 case IEMMODE_64BIT:
12043 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12044 IEM_MC_LOCAL(uint64_t, u64Value);
12045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12046
12047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12049
12050 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12051 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12052 IEM_MC_ADVANCE_RIP_AND_FINISH();
12053 IEM_MC_END();
12054 break;
12055
12056 case IEMMODE_16BIT:
12057 /** @todo check this form. */
12058 IEMOP_RAISE_INVALID_OPCODE_RET();
12059
12060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12061 }
12062 }
12063 else
12064 IEMOP_RAISE_INVALID_OPCODE_RET();
12065}
12066
12067
12068/* Opcode 0x66 0x0f 0xc3 - invalid */
12069/* Opcode 0xf3 0x0f 0xc3 - invalid */
12070/* Opcode 0xf2 0x0f 0xc3 - invalid */
12071
12072
12073/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12074FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12075{
12076 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
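    /* Note: only the low two bits of the immediate select the destination
       word in the 64-bit MMX register (bImm & 3); the XMM form masks with 7. */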
12077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12078 if (IEM_IS_MODRM_REG_MODE(bRm))
12079 {
12080 /*
12081 * Register, register.
12082 */
12083 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12084 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12085 IEM_MC_LOCAL(uint16_t, uValue);
12086
12087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12088 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12089 IEM_MC_PREPARE_FPU_USAGE();
12090 IEM_MC_FPU_TO_MMX_MODE();
12091
12092 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12093 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12094
12095 IEM_MC_ADVANCE_RIP_AND_FINISH();
12096 IEM_MC_END();
12097 }
12098 else
12099 {
12100 /*
12101 * Register, memory.
12102 */
12103 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12105 IEM_MC_LOCAL(uint16_t, uValue);
12106
12107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12108 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12110 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12111 IEM_MC_PREPARE_FPU_USAGE();
12112
12113 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12114 IEM_MC_FPU_TO_MMX_MODE();
12115 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12116
12117 IEM_MC_ADVANCE_RIP_AND_FINISH();
12118 IEM_MC_END();
12119 }
12120}
12121
12122
12123/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12124FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12125{
12126 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12128 if (IEM_IS_MODRM_REG_MODE(bRm))
12129 {
12130 /*
12131 * Register, register.
12132 */
12133 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12134 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12136
12137 IEM_MC_LOCAL(uint16_t, uValue);
12138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12139 IEM_MC_PREPARE_SSE_USAGE();
12140
12141 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12142 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12143 IEM_MC_ADVANCE_RIP_AND_FINISH();
12144 IEM_MC_END();
12145 }
12146 else
12147 {
12148 /*
12149 * Register, memory.
12150 */
12151 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12153 IEM_MC_LOCAL(uint16_t, uValue);
12154
12155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12156 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12158 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12159 IEM_MC_PREPARE_SSE_USAGE();
12160
12161 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12162 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12163 IEM_MC_ADVANCE_RIP_AND_FINISH();
12164 IEM_MC_END();
12165 }
12166}
12167
12168
12169/* Opcode 0xf3 0x0f 0xc4 - invalid */
12170/* Opcode 0xf2 0x0f 0xc4 - invalid */
12171
12172
12173/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12174FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12175{
12176 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12178 if (IEM_IS_MODRM_REG_MODE(bRm))
12179 {
12180 /*
12181 * Greg32, MMX, imm8.
12182 */
12183 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12184 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12186 IEM_MC_LOCAL(uint16_t, uValue);
12187 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12188 IEM_MC_PREPARE_FPU_USAGE();
12189 IEM_MC_FPU_TO_MMX_MODE();
12190 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12191 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12192 IEM_MC_ADVANCE_RIP_AND_FINISH();
12193 IEM_MC_END();
12194 }
12195 /* No memory operand. */
12196 else
12197 IEMOP_RAISE_INVALID_OPCODE_RET();
12198}
12199
12200
12201/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12202FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12203{
12204 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
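    /* Note: the immediate selects the source word (bImm & 7); the word is
       zero-extended into the 32-bit general register. */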
12205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12206 if (IEM_IS_MODRM_REG_MODE(bRm))
12207 {
12208 /*
12209 * Greg32, XMM, imm8.
12210 */
12211 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12212 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12214 IEM_MC_LOCAL(uint16_t, uValue);
12215 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12216 IEM_MC_PREPARE_SSE_USAGE();
12217 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12218 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12219 IEM_MC_ADVANCE_RIP_AND_FINISH();
12220 IEM_MC_END();
12221 }
12222 /* No memory operand. */
12223 else
12224 IEMOP_RAISE_INVALID_OPCODE_RET();
12225}
12226
12227
12228/* Opcode 0xf3 0x0f 0xc5 - invalid */
12229/* Opcode 0xf2 0x0f 0xc5 - invalid */
12230
12231
12232/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12233FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12234{
12235 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
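    /* Note: shufps takes the two low result dwords from the destination
       (imm8 bits 1:0 and 3:2) and the two high result dwords from the source
       (imm8 bits 5:4 and 7:6); the shuffling is done by the worker. */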
12236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12237 if (IEM_IS_MODRM_REG_MODE(bRm))
12238 {
12239 /*
12240 * XMM, XMM, imm8.
12241 */
12242 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12243 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12245 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12246 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12247 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12248 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12249 IEM_MC_PREPARE_SSE_USAGE();
12250 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12251 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12252 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12253 IEM_MC_ADVANCE_RIP_AND_FINISH();
12254 IEM_MC_END();
12255 }
12256 else
12257 {
12258 /*
12259 * XMM, [mem128], imm8.
12260 */
12261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12262 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12263 IEM_MC_LOCAL(RTUINT128U, uSrc);
12264 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12266
12267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12268 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12269 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12271 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12272 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12273
12274 IEM_MC_PREPARE_SSE_USAGE();
12275 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12277
12278 IEM_MC_ADVANCE_RIP_AND_FINISH();
12279 IEM_MC_END();
12280 }
12281}
12282
12283
12284/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12285FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12286{
12287 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12289 if (IEM_IS_MODRM_REG_MODE(bRm))
12290 {
12291 /*
12292 * XMM, XMM, imm8.
12293 */
12294 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12295 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12297 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12298 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12299 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12300 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12301 IEM_MC_PREPARE_SSE_USAGE();
12302 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12303 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12304 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12305 IEM_MC_ADVANCE_RIP_AND_FINISH();
12306 IEM_MC_END();
12307 }
12308 else
12309 {
12310 /*
12311 * XMM, [mem128], imm8.
12312 */
12313 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12314 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12315 IEM_MC_LOCAL(RTUINT128U, uSrc);
12316 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12318
12319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12320 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12321 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12323 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12324 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12325
12326 IEM_MC_PREPARE_SSE_USAGE();
12327 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12328 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12329
12330 IEM_MC_ADVANCE_RIP_AND_FINISH();
12331 IEM_MC_END();
12332 }
12333}
12334
12335
12336/* Opcode 0xf3 0x0f 0xc6 - invalid */
12337/* Opcode 0xf2 0x0f 0xc6 - invalid */
12338
12339
12340/**
12341 * @opmaps grp9
12342 * @opcode /1
12343 * @opcodesub !11 mr/reg rex.w=0
12344 * @oppfx n/a
12345 * @opflmodify zf
12346 */
12347FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12348{
12349 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
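    /* Rough sketch of what the iemAImpl_cmpxchg8b workers implement, shown
       here only for orientation:
           if (*pu64MemDst == EDX:EAX) { ZF = 1; *pu64MemDst = ECX:EBX; }
           else                        { ZF = 0; EDX:EAX = *pu64MemDst;  }
       The conditional EDX:EAX write-back is done in the macro tail below. */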
12350#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12351 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12354 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12355 \
12356 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12357 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12358 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12359 \
12360 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12361 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12362 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12363 \
12364 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12365 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12366 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12367 \
12368 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12369 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12370 \
12371 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12372 IEM_MC_COMMIT_EFLAGS(EFlags); \
12373 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12374 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12375 } IEM_MC_ENDIF(); \
12376 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12377 \
12378 IEM_MC_END()
12379 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12380 {
12381 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12382 }
12383 else
12384 {
12385 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12386 }
12387}
12388
12389
12390/**
12391 * @opmaps grp9
12392 * @opcode /1
12393 * @opcodesub !11 mr/reg rex.w=1
12394 * @oppfx n/a
12395 * @opflmodify zf
12396 */
12397FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12398{
12399 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12400 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12401 {
12402 /*
12403 * This is hairy, very hairy macro fun. We're walking a fine line
12404 * here to make the code parsable by IEMAllInstPython.py and fit into
12405 * the patterns IEMAllThrdPython.py requires for the code morphing.
12406 */
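    /* The HEAD macro maps the destination memory, loads RDX:RAX and RCX:RBX
       into 128-bit locals and sets up the EFLAGS argument; the TAIL macro
       commits memory and EFLAGS and, on ZF=0 (compare failure), writes the
       memory value back into RDX:RAX. The worker call is spliced in between. */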
12407#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12408 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12411 IEMOP_HLP_DONE_DECODING(); \
12412 \
12413 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12414 bUnmapInfoStmt; \
12415 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12416 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12417 \
12418 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12419 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12420 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12421 \
12422 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12423 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12424 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12425 \
12426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12427
12428#define BODY_CMPXCHG16B_TAIL(a_Type) \
12429 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12430 IEM_MC_COMMIT_EFLAGS(EFlags); \
12431 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12432 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12433 } IEM_MC_ENDIF(); \
12434 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12435 IEM_MC_END()
12436
12437#ifdef RT_ARCH_AMD64
12438 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12439 {
12440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12441 {
12442 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12443 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12444 BODY_CMPXCHG16B_TAIL(RW);
12445 }
12446 else
12447 {
12448 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12449 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12450 BODY_CMPXCHG16B_TAIL(ATOMIC);
12451 }
12452 }
12453 else
12454 { /* (see comments in #else case below) */
12455 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12456 {
12457 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12458 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12459 BODY_CMPXCHG16B_TAIL(RW);
12460 }
12461 else
12462 {
12463 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12464 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12465 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12466 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12467 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12468 pEFlags, bUnmapInfo);
12469 IEM_MC_END();
12470 }
12471 }
12472
12473#elif defined(RT_ARCH_ARM64)
12474 /** @todo may require fallback for unaligned accesses... */
12475 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12476 {
12477 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12478 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12479 BODY_CMPXCHG16B_TAIL(RW);
12480 }
12481 else
12482 {
12483 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12484 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12485 BODY_CMPXCHG16B_TAIL(ATOMIC);
12486 }
12487
12488#else
12489 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12490    accesses and not at all atomic, which works fine in a UNI CPU guest
12491    configuration (ignoring DMA). If guest SMP is active we have no choice
12492    but to use a rendezvous callback here. Sigh. */
12493 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12494 {
12495 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12496 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12497 BODY_CMPXCHG16B_TAIL(RW);
12498 }
12499 else
12500 {
12501 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12502 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12503 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12504 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12505 iemCImpl_cmpxchg16b_fallback_rendezvous,
12506 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12507 IEM_MC_END();
12508 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12509 }
12510#endif
12511
12512#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12513 }
12514 Log(("cmpxchg16b -> #UD\n"));
12515 IEMOP_RAISE_INVALID_OPCODE_RET();
12516}
12517
12518FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12519{
12520 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12521 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12522 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12523}
12524
12525
12526/** Opcode 0x0f 0xc7 11/6. */
12527FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12528{
12529 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12530 IEMOP_RAISE_INVALID_OPCODE_RET();
12531
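    /* Note: rdrand reports its status via CF (architecturally CF=1 means a
       valid random value was delivered); the flag update is done by
       iemCImpl_rdrand, hence the IEM_CIMPL_F_RFLAGS below. */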
12532 if (IEM_IS_MODRM_REG_MODE(bRm))
12533 {
12534 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12536 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12537 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12538 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12539 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12540 iemCImpl_rdrand, iReg, enmEffOpSize);
12541 IEM_MC_END();
12542 }
12543 /* Register only. */
12544 else
12545 IEMOP_RAISE_INVALID_OPCODE_RET();
12546}
12547
12548/** Opcode 0x0f 0xc7 !11/6. */
12549#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12550FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12551{
12552 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12553 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12554 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12556 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12558 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12559 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12560 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12561 IEM_MC_END();
12562}
12563#else
12564FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12565#endif
12566
12567/** Opcode 0x66 0x0f 0xc7 !11/6. */
12568#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12569FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12570{
12571 IEMOP_MNEMONIC(vmclear, "vmclear");
12572 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12573 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12574 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12575 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12577 IEMOP_HLP_DONE_DECODING();
12578 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12579 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12580 IEM_MC_END();
12581}
12582#else
12583FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12584#endif
12585
12586/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12587#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12588FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12589{
12590 IEMOP_MNEMONIC(vmxon, "vmxon");
12591 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12592 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12593 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12595 IEMOP_HLP_DONE_DECODING();
12596 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12597 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12598 IEM_MC_END();
12599}
12600#else
12601FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12602#endif
12603
12604/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12605#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12606FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12607{
12608 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12609 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12610 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12611 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12612 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12614 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12615 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12616 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12617 IEM_MC_END();
12618}
12619#else
12620FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12621#endif
12622
12623/** Opcode 0x0f 0xc7 11/7. */
12624FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12625{
12626 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12627 IEMOP_RAISE_INVALID_OPCODE_RET();
12628
12629 if (IEM_IS_MODRM_REG_MODE(bRm))
12630 {
12631 /* register destination. */
12632 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12634 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12635 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12636 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12637 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12638 iemCImpl_rdseed, iReg, enmEffOpSize);
12639 IEM_MC_END();
12640 }
12641 /* Register only. */
12642 else
12643 IEMOP_RAISE_INVALID_OPCODE_RET();
12644}
12645
12646/**
12647 * Group 9 jump table for register variant.
12648 */
12649IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12650{ /* pfx: none, 066h, 0f3h, 0f2h */
12651 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12652 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12653 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12654 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12655 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12656 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12657 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12658 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12659};
12660AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12661
12662
12663/**
12664 * Group 9 jump table for memory variant.
12665 */
12666IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12667{ /* pfx: none, 066h, 0f3h, 0f2h */
12668 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12669 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12670 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12671 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12672 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12673 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12674 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12675 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12676};
12677AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12678
12679
12680/** Opcode 0x0f 0xc7. */
12681FNIEMOP_DEF(iemOp_Grp9)
12682{
12683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
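    /* The tables above are indexed by the ModR/M reg field (0..7) times four
       plus the mandatory prefix index (none/66h/F3h/F2h). */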
12684 if (IEM_IS_MODRM_REG_MODE(bRm))
12685 /* register, register */
12686 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12687 + pVCpu->iem.s.idxPrefix], bRm);
12688 /* memory, register */
12689 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12690 + pVCpu->iem.s.idxPrefix], bRm);
12691}
12692
12693
12694/**
12695 * Common 'bswap register' helper.
12696 */
12697FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12698{
12699 switch (pVCpu->iem.s.enmEffOpSize)
12700 {
12701 case IEMMODE_16BIT:
12702 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12704 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12705 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
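            /* Note: bswap with a 16-bit operand is documented as undefined;
               the 16-bit worker implements the behaviour IEM has chosen for
               this case. */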
12706 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12707 IEM_MC_ADVANCE_RIP_AND_FINISH();
12708 IEM_MC_END();
12709 break;
12710
12711 case IEMMODE_32BIT:
12712 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12714 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12715 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12716 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12717 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12718 IEM_MC_ADVANCE_RIP_AND_FINISH();
12719 IEM_MC_END();
12720 break;
12721
12722 case IEMMODE_64BIT:
12723 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12725 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12726 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12727 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12728 IEM_MC_ADVANCE_RIP_AND_FINISH();
12729 IEM_MC_END();
12730 break;
12731
12732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12733 }
12734}
12735
12736
12737/** Opcode 0x0f 0xc8. */
12738FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12739{
12740 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12741 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12742 prefix. REX.B appears to be the correct prefix, however. For a parallel
12743 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12744 IEMOP_HLP_MIN_486();
12745 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12746}
12747
12748
12749/** Opcode 0x0f 0xc9. */
12750FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12751{
12752 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12753 IEMOP_HLP_MIN_486();
12754 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12755}
12756
12757
12758/** Opcode 0x0f 0xca. */
12759FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12760{
12761 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12762 IEMOP_HLP_MIN_486();
12763 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12764}
12765
12766
12767/** Opcode 0x0f 0xcb. */
12768FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12769{
12770 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12771 IEMOP_HLP_MIN_486();
12772 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12773}
12774
12775
12776/** Opcode 0x0f 0xcc. */
12777FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12778{
12779 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12780 IEMOP_HLP_MIN_486();
12781 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12782}
12783
12784
12785/** Opcode 0x0f 0xcd. */
12786FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12787{
12788 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12789 IEMOP_HLP_MIN_486();
12790 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12791}
12792
12793
12794/** Opcode 0x0f 0xce. */
12795FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12796{
12797 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12798 IEMOP_HLP_MIN_486();
12799 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12800}
12801
12802
12803/** Opcode 0x0f 0xcf. */
12804FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12805{
12806 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12807 IEMOP_HLP_MIN_486();
12808 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12809}
12810
12811
12812/* Opcode 0x0f 0xd0 - invalid */
12813
12814
12815/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12816FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12817{
12818 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12819 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12820}
12821
12822
12823/* Opcode 0xf3 0x0f 0xd0 - invalid */
12824
12825
12826/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12827FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12828{
12829 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12830 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12831}
12832
12833
12834
12835/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12836FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12837{
12838 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12839 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12840}
12841
12842/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12843FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12844{
12845 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12846 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12847}
12848
12849/* Opcode 0xf3 0x0f 0xd1 - invalid */
12850/* Opcode 0xf2 0x0f 0xd1 - invalid */
12851
12852/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12853FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12854{
12855 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12856 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12857}
12858
12859
12860/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12861FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12862{
12863 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12864 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12865}
12866
12867
12868/* Opcode 0xf3 0x0f 0xd2 - invalid */
12869/* Opcode 0xf2 0x0f 0xd2 - invalid */
12870
12871/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12872FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12873{
12874 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12875 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12876}
12877
12878
12879/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12880FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12881{
12882 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12883 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12884}
12885
12886
12887/* Opcode 0xf3 0x0f 0xd3 - invalid */
12888/* Opcode 0xf2 0x0f 0xd3 - invalid */
12889
12890
12891/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12892FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12893{
12894 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12895 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12896}
12897
12898
12899/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12900FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12901{
12902 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
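    /* SSE2_OPT_BODY_FullFull_To_Full expands to the common reg,reg / reg,mem
       body; the two architecture masks presumably gate the native recompiler
       emitters for the register and memory forms respectively. */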
12903 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12904}
12905
12906
12907/* Opcode 0xf3 0x0f 0xd4 - invalid */
12908/* Opcode 0xf2 0x0f 0xd4 - invalid */
12909
12910/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12911FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12912{
12913 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12914 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12915}
12916
12917/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12918FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12919{
12920 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12921 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12922}
12923
12924
12925/* Opcode 0xf3 0x0f 0xd5 - invalid */
12926/* Opcode 0xf2 0x0f 0xd5 - invalid */
12927
12928/* Opcode 0x0f 0xd6 - invalid */
12929
12930/**
12931 * @opcode 0xd6
12932 * @oppfx 0x66
12933 * @opcpuid sse2
12934 * @opgroup og_sse2_pcksclr_datamove
12935 * @opxcpttype none
12936 * @optest op1=-1 op2=2 -> op1=2
12937 * @optest op1=0 op2=-42 -> op1=-42
12938 */
12939FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12940{
12941 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12943 if (IEM_IS_MODRM_REG_MODE(bRm))
12944 {
12945 /*
12946 * Register, register.
12947 */
12948 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12950 IEM_MC_LOCAL(uint64_t, uSrc);
12951
12952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12954
12955 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12956 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12957
12958 IEM_MC_ADVANCE_RIP_AND_FINISH();
12959 IEM_MC_END();
12960 }
12961 else
12962 {
12963 /*
12964 * Memory, register.
12965 */
12966 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12967 IEM_MC_LOCAL(uint64_t, uSrc);
12968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12969
12970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12972 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12974
12975 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12976 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12977
12978 IEM_MC_ADVANCE_RIP_AND_FINISH();
12979 IEM_MC_END();
12980 }
12981}
12982
12983
12984/**
12985 * @opcode 0xd6
12986 * @opcodesub 11 mr/reg
12987 * @oppfx f3
12988 * @opcpuid sse2
12989 * @opgroup og_sse2_simdint_datamove
12990 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12991 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12992 */
12993FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12994{
12995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12996 if (IEM_IS_MODRM_REG_MODE(bRm))
12997 {
12998 /*
12999 * Register, register.
13000 */
13001 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13002 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13004 IEM_MC_LOCAL(uint64_t, uSrc);
13005
13006 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13007 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13008 IEM_MC_FPU_TO_MMX_MODE();
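        /* Entering MMX mode tags all eight x87/MMX registers as valid
           (abridged FTW=0xff), matching the ftw=0xff expectations in the
           @optest lines above. */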
13009
13010 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13011 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13012
13013 IEM_MC_ADVANCE_RIP_AND_FINISH();
13014 IEM_MC_END();
13015 }
13016
13017 /**
13018 * @opdone
13019 * @opmnemonic udf30fd6mem
13020 * @opcode 0xd6
13021 * @opcodesub !11 mr/reg
13022 * @oppfx f3
13023 * @opunused intel-modrm
13024 * @opcpuid sse
13025 * @optest ->
13026 */
13027 else
13028 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13029}
13030
13031
13032/**
13033 * @opcode 0xd6
13034 * @opcodesub 11 mr/reg
13035 * @oppfx f2
13036 * @opcpuid sse2
13037 * @opgroup og_sse2_simdint_datamove
13038 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13039 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13040 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13041 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13042 * @optest op1=-42 op2=0xfedcba9876543210
13043 * -> op1=0xfedcba9876543210 ftw=0xff
13044 */
13045FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13046{
13047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13048 if (IEM_IS_MODRM_REG_MODE(bRm))
13049 {
13050 /*
13051 * Register, register.
13052 */
13053 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13056 IEM_MC_LOCAL(uint64_t, uSrc);
13057
13058 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13059 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13060 IEM_MC_FPU_TO_MMX_MODE();
13061
13062 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13063 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13064
13065 IEM_MC_ADVANCE_RIP_AND_FINISH();
13066 IEM_MC_END();
13067 }
13068
13069 /**
13070 * @opdone
13071 * @opmnemonic udf20fd6mem
13072 * @opcode 0xd6
13073 * @opcodesub !11 mr/reg
13074 * @oppfx f2
13075 * @opunused intel-modrm
13076 * @opcpuid sse
13077 * @optest ->
13078 */
13079 else
13080 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13081}
13082
13083
13084/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13085FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13086{
13087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13088 /* Docs say register only. */
13089 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13090 {
13091 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13092 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
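        /* pmovmskb gathers the most significant bit of each packed byte into
           the low bits of the destination GPR (eight bits for the MMX form). */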
13093 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13095 IEM_MC_ARG(uint64_t *, puDst, 0);
13096 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13097 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13098 IEM_MC_PREPARE_FPU_USAGE();
13099 IEM_MC_FPU_TO_MMX_MODE();
13100
13101 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13102 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13103 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13104
13105 IEM_MC_ADVANCE_RIP_AND_FINISH();
13106 IEM_MC_END();
13107 }
13108 else
13109 IEMOP_RAISE_INVALID_OPCODE_RET();
13110}
13111
13112
13113/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13114FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13115{
13116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13117 /* Docs say register only. */
13118 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13119 {
13120 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13121 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13122 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13125 IEM_MC_PREPARE_SSE_USAGE();
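        /* On AMD64 and ARM64 hosts the recompiler emits pmovmskb natively;
           other hosts take the C worker fallback in the ELSE branch. */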
13126 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13127 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13128 } IEM_MC_NATIVE_ELSE() {
13129 IEM_MC_ARG(uint64_t *, puDst, 0);
13130 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13131 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13132 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13133 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13134 } IEM_MC_NATIVE_ENDIF();
13135 IEM_MC_ADVANCE_RIP_AND_FINISH();
13136 IEM_MC_END();
13137 }
13138 else
13139 IEMOP_RAISE_INVALID_OPCODE_RET();
13140}
13141
13142
13143/* Opcode 0xf3 0x0f 0xd7 - invalid */
13144/* Opcode 0xf2 0x0f 0xd7 - invalid */
13145
13146
13147/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13148FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13149{
13150 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13151 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13152}
13153
13154
13155/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13156FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13157{
13158 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13159 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13160}
13161
13162
13163/* Opcode 0xf3 0x0f 0xd8 - invalid */
13164/* Opcode 0xf2 0x0f 0xd8 - invalid */
13165
13166/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13167FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13168{
13169 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13170 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13171}
13172
13173
13174/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13175FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13176{
13177 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13178 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13179}
13180
13181
13182/* Opcode 0xf3 0x0f 0xd9 - invalid */
13183/* Opcode 0xf2 0x0f 0xd9 - invalid */
13184
13185/** Opcode 0x0f 0xda - pminub Pq, Qq */
13186FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13187{
13188 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13189 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13190}
13191
13192
13193/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13194FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13195{
13196 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13197 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13198}
13199
13200/* Opcode 0xf3 0x0f 0xda - invalid */
13201/* Opcode 0xf2 0x0f 0xda - invalid */
13202
13203/** Opcode 0x0f 0xdb - pand Pq, Qq */
13204FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13205{
13206 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13207 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13208}
13209
13210
13211/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13212FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13213{
13214 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13215 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13216}
13217
13218
13219/* Opcode 0xf3 0x0f 0xdb - invalid */
13220/* Opcode 0xf2 0x0f 0xdb - invalid */
13221
13222/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13223FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13224{
13225 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13226 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13227}
13228
13229
13230/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13231FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13232{
13233 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13234 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13235}
13236
13237
13238/* Opcode 0xf3 0x0f 0xdc - invalid */
13239/* Opcode 0xf2 0x0f 0xdc - invalid */
13240
13241/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13242FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13243{
13244 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13245 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13246}
13247
13248
13249/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13250FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13251{
13252 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13253 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13254}
13255
13256
13257/* Opcode 0xf3 0x0f 0xdd - invalid */
13258/* Opcode 0xf2 0x0f 0xdd - invalid */
13259
13260/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13261FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13262{
13263 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13264 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13265}
13266
13267
13268/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13269FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13270{
13271 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13272 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13273}
13274
13275/* Opcode 0xf3 0x0f 0xde - invalid */
13276/* Opcode 0xf2 0x0f 0xde - invalid */
13277
13278
13279/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13280FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13281{
13282 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13283 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13284}
13285
13286
13287/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13288FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13289{
13290 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13291 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13292}
13293
13294
13295/* Opcode 0xf3 0x0f 0xdf - invalid */
13296/* Opcode 0xf2 0x0f 0xdf - invalid */
13297
13298/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13299FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13300{
13301 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13302 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13303}
13304
13305
13306/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13307FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13308{
13309 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13310 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13311}
13312
13313
13314/* Opcode 0xf3 0x0f 0xe0 - invalid */
13315/* Opcode 0xf2 0x0f 0xe0 - invalid */
13316
13317/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13318FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13319{
13320 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13321 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13322}
13323
13324
13325/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13326FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13327{
13328 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13329 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13330}
13331
13332
13333/* Opcode 0xf3 0x0f 0xe1 - invalid */
13334/* Opcode 0xf2 0x0f 0xe1 - invalid */
13335
13336/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13337FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13338{
13339 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13340 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13341}
13342
13343
13344/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13345FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13346{
13347 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13348 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13349}
13350
13351
13352/* Opcode 0xf3 0x0f 0xe2 - invalid */
13353/* Opcode 0xf2 0x0f 0xe2 - invalid */
13354
13355/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13356FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13357{
13358 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13359 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13360}
13361
13362
13363/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13364FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13365{
13366 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13367 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13368}
13369
13370
13371/* Opcode 0xf3 0x0f 0xe3 - invalid */
13372/* Opcode 0xf2 0x0f 0xe3 - invalid */
13373
13374/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13375FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13376{
13377 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13378 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13379}
13380
13381
13382/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13383FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13384{
13385 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13386 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13387}
13388
13389
13390/* Opcode 0xf3 0x0f 0xe4 - invalid */
13391/* Opcode 0xf2 0x0f 0xe4 - invalid */
13392
13393/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13394FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13395{
13396 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13397 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13398}
13399
13400
13401/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13402FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13403{
13404 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13405 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13406}
13407
13408
13409/* Opcode 0xf3 0x0f 0xe5 - invalid */
13410/* Opcode 0xf2 0x0f 0xe5 - invalid */
13411/* Opcode 0x0f 0xe6 - invalid */
13412
13413
13414/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13415FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13416{
13417 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13418 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13419}
13420
13421
13422/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13423FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13424{
13425 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13426 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13427}
13428
13429
13430/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13431FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13432{
13433 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13434 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13435}
13436
13437
13438/**
13439 * @opcode 0xe7
13440 * @opcodesub !11 mr/reg
13441 * @oppfx none
13442 * @opcpuid sse
13443 * @opgroup og_sse1_cachect
13444 * @opxcpttype none
13445 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13446 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13447 */
13448FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13449{
13450 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13452 if (IEM_IS_MODRM_MEM_MODE(bRm))
13453 {
13454 /* Register, memory. */
13455 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13456 IEM_MC_LOCAL(uint64_t, uSrc);
13457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13458
13459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13461 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13462 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13463 IEM_MC_FPU_TO_MMX_MODE();
13464
13465 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13466 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13467
13468 IEM_MC_ADVANCE_RIP_AND_FINISH();
13469 IEM_MC_END();
13470 }
13471 /**
13472 * @opdone
13473 * @opmnemonic ud0fe7reg
13474 * @opcode 0xe7
13475 * @opcodesub 11 mr/reg
13476 * @oppfx none
13477 * @opunused immediate
13478 * @opcpuid sse
13479 * @optest ->
13480 */
13481 else
13482 IEMOP_RAISE_INVALID_OPCODE_RET();
13483}
13484
13485/**
13486 * @opcode 0xe7
13487 * @opcodesub !11 mr/reg
13488 * @oppfx 0x66
13489 * @opcpuid sse2
13490 * @opgroup og_sse2_cachect
13491 * @opxcpttype 1
13492 * @optest op1=-1 op2=2 -> op1=2
13493 * @optest op1=0 op2=-42 -> op1=-42
13494 */
13495FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13496{
13497 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
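    /* Note: movntdq is a non-temporal 128-bit store; the memory operand must
       be 16 byte aligned, which IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces. */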
13498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13499 if (IEM_IS_MODRM_MEM_MODE(bRm))
13500 {
13501 /* Register, memory. */
13502 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13503 IEM_MC_LOCAL(RTUINT128U, uSrc);
13504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13505
13506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13509 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13510
13511 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13512 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13513
13514 IEM_MC_ADVANCE_RIP_AND_FINISH();
13515 IEM_MC_END();
13516 }
13517
13518 /**
13519 * @opdone
13520 * @opmnemonic ud660fe7reg
13521 * @opcode 0xe7
13522 * @opcodesub 11 mr/reg
13523 * @oppfx 0x66
13524 * @opunused immediate
13525 * @opcpuid sse
13526 * @optest ->
13527 */
13528 else
13529 IEMOP_RAISE_INVALID_OPCODE_RET();
13530}
13531
13532/* Opcode 0xf3 0x0f 0xe7 - invalid */
13533/* Opcode 0xf2 0x0f 0xe7 - invalid */
13534
13535
13536/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13537FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13538{
13539 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13540 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13541}
13542
13543
13544/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13545FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13546{
13547 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13548 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13549}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint64_t, u64EffAddr);
        IEM_MC_LOCAL(uint64_t, u64Mem);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_ARG(uint64_t const *, puMsk, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint64_t, u64EffAddr);
        IEM_MC_LOCAL(RTUINT128U, u128Mem);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puMsk, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*       no prefix,                066h prefix                f3h prefix,                f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
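
/*
 * Informal note: the map holds four entries per opcode byte (no prefix,
 * 0x66, 0xf3, 0xf2), hence the 1024-element assertion above.  A lookup is
 * thus along the lines of (a sketch of the assumed indexing, not the actual
 * decoder code):
 *
 *      pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 *
 * with idxPrefix being 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for
 * 0xf2.
 */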

/** @} */