VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 107044

Last change on this file since 107044 was 106814, checked in by vboxsync, 3 weeks ago

ValidationKit/bootsectors: Implement SIMD FP testcases for cvtpi2pd, cvtpd2pi, cvttpd2pi, and several IEM fixes; bugref:10658; jiraref:VBP-1206

IEM:

  • fix IEM cvttpd2pi, cvttps2pi, cvtps2pi, cvtpd2pi failing to raise x87/MMX exceptions
  • fix IEM cvtpi2ps incorrectly raising x87/MMX exceptions for non-MMX instructions

ValKit bs3-cpu-instr-4 test setup:

  • add FP64_x2 value bar macros
  • fix SDM exception table 23-4/5/6 test cfgs
  • test cvtpi2ps, cvtps2pi, cvttps2pi with correct exceptions: table 23-5
  • test cvtsi2ss, cvtss2si, cvttss2si with correct exceptions: class 3

ValKit bs3-cpu-instr-4 test worker:

  • add ability to mark an instruction test immune to a particular exception

ValKit bs3-cpu-instr-4 tests:

  • add tests for cvtpi2pd, cvtpd2pi, cvttpd2pi
  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 518.6 KB
Line 
1/* $Id: IEMAllInstTwoByte0f.cpp.h 106814 2024-11-01 02:01:09Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 *
 * @param   pfnU64  The instruction emulation worker; called with pointers to
 *                  the 64-bit destination and source MMX operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    /* The ModR/M byte selects between the register,register and register,memory forms. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx); /* rejects LOCK; checks the fMmx CPUID feature */
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* x87/MMX exception checks precede any state changes */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();               /* enter MMX mode before the worker runs */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);      /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done,
           since the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch before touching FPU/MMX state */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
97
98
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 *
 * @param   pfnU64  The instruction emulation worker; called with pointers to
 *                  the 64-bit destination and source MMX operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* Accepted when either SSE or the AMD MMX extensions are available. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);      /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch before touching FPU/MMX state */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
159
160
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * @param   pfnU64  The instruction emulation worker; called with pointers to
 *                  the 64-bit destination and source MMX operands (no FXSAVE
 *                  state needed).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* rejects LOCK; requires the fSse2 CPUID feature */
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);      /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch before touching FPU/MMX state */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
219
220
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @param   pfnU128 The instruction emulation worker; called with pointers to
 *                  the 128-bit destination and source XMM operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* SSE availability/#NM checks precede any state changes */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checking fetch; done before any SSE state is modified. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
276
277
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @param   pfnU128 The instruction emulation worker; called with pointers to
 *                  the 128-bit destination and source XMM operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* rejects LOCK; requires the fSse2 CPUID feature */
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checking fetch; done before any SSE state is modified. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
333
334
/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 *
 * @param a_Ins              The instruction name; used to derive the native
 *                           emitter names iemNativeEmit_<a_Ins>_rr_u128 and
 *                           iemNativeEmit_<a_Ins>_rv_u128 via RT_CONCAT3.
 * @param a_pImplExpr        Expression yielding the C fallback worker
 *                           (PFNIEMAIMPLMEDIAOPTF2U128).
 * @param a_fRegNativeArchs  Host architectures with a native reg,reg emitter.
 * @param a_fMemNativeArchs  Host architectures with a native reg,mem emitter.
 *                           NOTE(review): currently unused - the memory path
 *                           below also tests @a a_fRegNativeArchs; verify this
 *                           is intentional.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        /* Use the native emitter where available, otherwise call the C worker. */ \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
387
388
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * @param   pfnU64  The instruction emulation worker; called with pointers to
 *                  the 64-bit destination and source MMX operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* x87/MMX exception checks precede any state changes */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);     /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only 32 bits are read from memory; zero-extended into the 64-bit source. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
447
448
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 The instruction emulation worker; called with pointers to
 *                  the 128-bit destination and source XMM operands.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* NOTE(review): uses ACTUALIZE_SSE_STATE_FOR_CHANGE here where sibling
           workers use PREPARE_SSE_USAGE - confirm the difference is intentional. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
507
508
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 The instruction emulation worker; called with pointers to
 *                  the 128-bit destination and source XMM operands.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* NOTE(review): uses ACTUALIZE_SSE_STATE_FOR_CHANGE here where sibling
           workers use PREPARE_SSE_USAGE - confirm the difference is intentional. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
567
568
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 *
 * @param   pfnU64  The instruction emulation worker; called with pointers to
 *                  the 64-bit destination and source MMX operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* x87/MMX exception checks precede any state changes */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);     /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
629
630
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 The instruction emulation worker; called with pointers to
 *                  the 128-bit destination and source XMM operands.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
689
690
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @param   pfnU128 The SIMD FP emulation worker; called with a result buffer
 *                  plus pointers to the two 128-bit source operands.  The
 *                  result is written to a local and only committed to the
 *                  destination register afterwards.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);                    /* result buffer; committed below */
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checking fetch; done before any SSE state is modified. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
751
752
/**
 * A body preprocessor variant of iemOpCommonSseFp_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 *
 * @param a_Ins              The instruction name; used to derive the native
 *                           emitter names iemNativeEmit_<a_Ins>_rr_u128 and
 *                           iemNativeEmit_<a_Ins>_rv_u128 via RT_CONCAT3.
 * @param a_pImplExpr        Expression yielding the C fallback worker
 *                           (PFNIEMAIMPLFPSSEF2U128).
 * @param a_fRegNativeArchs  Host architectures with a native reg,reg emitter.
 * @param a_fMemNativeArchs  Host architectures with a native reg,mem emitter.
 *                           NOTE(review): currently unused - the memory path
 *                           below also tests @a a_fRegNativeArchs; verify this
 *                           is intentional.
 */
#define SSE_FP_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLFPSSEF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        /* Use the native emitter where available, otherwise call the C worker. */ \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); /* tell liveness analysis MXCSR is written */ \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCX86XMMREG, pSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(X86XMMREG, uSrc2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc2); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
813
814
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @param   pfnU128_R32 The SIMD FP emulation worker; called with a result
 *                      buffer, the full 128-bit first operand, and the
 *                      32-bit float second operand.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);                    /* result buffer; committed below */
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); /* only the low 32-bit float of the source */
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit read only */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
875
876
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @param   pfnU128 The SIMD FP emulation worker; called with a result buffer
 *                  plus pointers to the two 128-bit source operands.  The
 *                  result is written to a local and only committed to the
 *                  destination register afterwards.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);                    /* result buffer; committed below */
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first: the addressing form may include further opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checking fetch; done before any SSE state is modified. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
937
938
/**
 * Common worker for SSE2 scalar double-precision instructions on the forms:
 *      xxxsd     xmm1, xmm2/mem64
 *
 * No alignment restriction applies to the 64-bit memory operand
 * (exception type 3). SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* #UD on LOCK; requires SSE2. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low qword of the source register is referenced (scalar op). */
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Unaligned 64-bit fetch, as permitted for exception type 3 forms. */
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
999
1000
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4 (no SIMD FP exceptions, but #GP on misalignment).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* #UD on LOCK; requires SSE2. */
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Destination is read-modify-write, hence the non-const reference. */
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1059
1060
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx      xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3); /* #UD on LOCK; requires SSE3. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* Buffered result commit keeps the destination intact if #XF is raised. */
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch: #GP on misaligned 16-byte operand (exception type 2). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1121
1122
/** Opcode 0x0f 0x00 /0 - store the LDTR selector to a GPR or 16-bit memory word. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();             /* #UD on pre-286 CPUs. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD outside protected mode. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Defer to the C impl; the target GPR is flagged as clobbered for the
           native recompiler's register tracking. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1146
1147
/** Opcode 0x0f 0x00 /1 - store the task register selector to a GPR or 16-bit memory word. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();             /* #UD on pre-286 CPUs. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD outside protected mode. */


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Same pattern as sldt: defer to C impl with the target GPR flagged. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1172
1173
/** Opcode 0x0f 0x00 /2 - load the LDTR from a 16-bit register or memory operand. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();             /* #UD on pre-286 CPUs. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD outside protected mode. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* CPL check and descriptor validation are done by the C impl. */
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}
1203
1204
/** Opcode 0x0f 0x00 /3 - load the task register from a 16-bit register or memory operand. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();             /* #UD on pre-286 CPUs. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD outside protected mode. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        /* NOTE(review): lldt's register branch uses IEMOP_HLP_DECODED_NL_1 here
           while ltr uses the plain no-lock helper - looks inconsistent; confirm
           whether the disassembler cross-check was intentionally skipped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}
1234
1235
/* Need to associate flag info with the blocks, so duplicate the code. */
/**
 * Common body for VERR (@a fWrite = false) and VERW (@a fWrite = true),
 * group 6 /4 and /5.  Fetches the selector from a GPR or a 16-bit memory
 * operand and defers to iemCImpl_VerX, which performs the access check and
 * updates EFLAGS.ZF.
 */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        /* Fixed: register form must decode as IEMOPFORM_M_REG; the previous M_MEM was a copy/paste from the memory branch (only affected the debug disassembler cross-check). */ \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0
1263
/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 *
 * VERR - verify a segment for reading; sets ZF if readable at current CPL.
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false); /* false = read check. */
}
1274
1275
/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 *
 * VERW - verify a segment for writing; sets ZF if writable at current CPL.
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true); /* true = write check. */
}
1286
1287
/**
 * Group 6 jump table.
 * Indexed by the ModR/M reg field (/0../7); order must match the encoding.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,      /* /0 */
    iemOp_Grp6_str,       /* /1 */
    iemOp_Grp6_lldt,      /* /2 */
    iemOp_Grp6_ltr,       /* /3 */
    iemOp_Grp6_verr,      /* /4 */
    iemOp_Grp6_verw,      /* /5 */
    iemOp_InvalidWithRM,  /* /6 */
    iemOp_InvalidWithRM   /* /7 */
};
1302
/** Opcode 0x0f 0x00 - dispatches group 6 via the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1309
1310
/** Opcode 0x0f 0x01 /0 (memory form) - store GDTR to a 6/10-byte memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forced 64-bit operand size in long mode. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1325
1326
/** Opcode 0x0f 0x01 /0, rm=0xc1 - VMX/GIM hypercall entry point. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}
1339
1340
/** Opcode 0x0f 0x01 /0, rm=0xc2 - VMLAUNCH; stubbed to #UD without nested VMX support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry); /* #UD unless in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* A successful VM-entry switches mode/flags and branches far, hence the
       heavyweight CIMPL flags ending the translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1360
1361
/** Opcode 0x0f 0x01 /0, rm=0xc3 - VMRESUME; stubbed to #UD without nested VMX support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry); /* #UD unless in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* Same CIMPL flags as vmlaunch: VM-entry switches mode and ends the TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1381
1382
/** Opcode 0x0f 0x01 /0, rm=0xc4 - VMXOFF; stubbed to #UD without nested VMX support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff); /* #UD unless in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1400
1401
/** Opcode 0x0f 0x01 /1 (memory form) - store IDTR to a 6/10-byte memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forced 64-bit operand size in long mode. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1416
1417
/** Opcode 0x0f 0x01 /1, rm=0xc8 - MONITOR; address taken from DS:rAX (effective segment passed on). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1425
1426
/** Opcode 0x0f 0x01 /1, rm=0xc9 - MWAIT; may halt the vCPU, so the TB is ended. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}
1434
1435
/** Opcode 0x0f 0x01 /2 (memory form) - load GDTR from a 6/10-byte memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forced 64-bit operand size in long mode. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    /* Operand size matters: it selects a 24- vs 32-bit base when not in long mode. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1450
1451
/** Opcode 0x0f 0x01 0xd0 - XGETBV; #UD unless the guest CPU exposes XSAVE. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        /* Result lands in EDX:EAX; both GPRs are flagged for the recompiler. */
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1471
1472
/** Opcode 0x0f 0x01 0xd1 - XSETBV; #UD unless the guest CPU exposes XSAVE. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        /* Writing XCR0 can be intercepted, hence the VMEXIT flag. */
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1489
1490
/** Opcode 0x0f 0x01 /3 (memory form) - load IDTR from a 6/10-byte memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit code the operand size is forced to 64-bit (cf. IEMOP_HLP_64BIT_OP_SIZE in lgdt). */
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1505
1506
/** Opcode 0x0f 0x01 0xd8 - AMD VMRUN; #UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    /* Entering the nested guest switches mode and branches far; end the TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1520
/** Opcode 0x0f 0x01 0xd9 - AMD VMMCALL hypercall entry point. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumtion
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}
1536
/** Opcode 0x0f 0x01 0xda - AMD VMLOAD; #UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1548
1549
/** Opcode 0x0f 0x01 0xdb - AMD VMSAVE; #UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1561
1562
/** Opcode 0x0f 0x01 0xdc - AMD STGI (set global interrupt flag); #UD stub without nested SVM. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1574
1575
/** Opcode 0x0f 0x01 0xdd - AMD CLGI (clear global interrupt flag); #UD stub without nested SVM. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1587
1588
/** Opcode 0x0f 0x01 0xdf - AMD INVLPGA; #UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1600
1601
/** Opcode 0x0f 0x01 0xde - AMD SKINIT; #UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1613
1614
/** Opcode 0x0f 0x01 /4 - store the machine status word (CR0 low bits) to a GPR or 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286(); /* #UD on pre-286 CPUs. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Defer to C impl; the destination GPR is flagged for the recompiler. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1636
1637
/** Opcode 0x0f 0x01 /6 - load the machine status word (low CR0 bits). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        /* Register form has no memory operand; pass NIL so the C impl can tell
           the two forms apart (used for SVM decode assists). */
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Writing CR0 can change the CPU mode, hence IEM_CIMPL_F_MODE. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1669
1670
/** Opcode 0x0f 0x01 /7 - invalidate the TLB entry for the operand's linear address. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486(); /* #UD before the 486. */
    /* NOTE(review): decode check is MIN_486 but the MC flag is IEM_MC_F_MIN_386 -
       presumably there is no finer-grained MC flag; confirm this is intentional. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}
1683
1684
/** Opcode 0x0f 0x01 0xf8 - SWAPGS; 64-bit mode only, swaps GS base with MSR_KERNEL_GS_BASE. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT(); /* #UD outside 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Only the GS segment base is modified; flag it for the recompiler. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}
1693
1694
/** Opcode 0x0f 0x01 0xf9 - RDTSCP; returns TSC in EDX:EAX and TSC_AUX in ECX. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All three result GPRs are flagged as clobbered for the recompiler. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                iemCImpl_rdtscp);
}
1706
1707
/**
 * Group 7 jump table, memory variant.
 * Indexed by the ModR/M reg field (/0../7); order must match the encoding.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,      /* /0 */
    iemOp_Grp7_sidt,      /* /1 */
    iemOp_Grp7_lgdt,      /* /2 */
    iemOp_Grp7_lidt,      /* /3 */
    iemOp_Grp7_smsw,      /* /4 */
    iemOp_InvalidWithRM,  /* /5 */
    iemOp_Grp7_lmsw,      /* /6 */
    iemOp_Grp7_invlpg     /* /7 */
};
1722
1723
/** Opcode 0x0f 0x01 - group 7 dispatcher.
 *
 * Memory forms go through g_apfnGroup7Mem; register forms select on both the
 * reg and rm fields since many encodings here are full three-byte opcodes
 * (e.g. 0F 01 C1 = vmcall). */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX: 0xc1..0xc4 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 1: /* MONITOR/MWAIT: 0xc8..0xc9 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 2: /* XGETBV/XSETBV: 0xd0..0xd1 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 3: /* SVM: 0xd8..0xdf - all eight rm values are covered. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw has both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 6: /* lmsw has both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS/RDTSCP: 0xf8..0xf9 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1793
/**
 * Common worker for LAR (@a fIsLar = true) and LSL (@a fIsLar = false).
 *
 * Loads a 16-bit selector from a GPR or memory and defers to the appropriate
 * iemCImpl_LarLsl_u16/u64 helper, which writes the access rights / limit into
 * the destination GPR and updates ZF.  32-bit and 64-bit operand sizes share
 * the u64 path.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD outside protected mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Destination GPR is written through the reference; flag it and ZF. */
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                /* 32-bit and 64-bit destinations share the u64 helper. */
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1883
1884
1885
1886/**
1887 * @opcode 0x02
1888 * @opflmodify zf
1889 */
1890FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1891{
1892 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
 /* Delegate to the shared LAR/LSL worker; true selects LAR semantics. */
1893 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1894}
1895
1896
1897/**
1898 * @opcode 0x03
1899 * @opflmodify zf
1900 */
1901FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1902{
1903 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
 /* Delegate to the shared LAR/LSL worker; false selects LSL semantics. */
1904 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1905}
1906
1907
1908/** Opcode 0x0f 0x05. */
/** Opcode 0x0f 0x05 - SYSCALL; on the 286 target CPU this encoding is
 *  LOADALL instead.  Both are deferred to C implementations that end the
 *  current translation block (far branch / mode change). */
1909FNIEMOP_DEF(iemOp_syscall)
1910{
1911 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1912 {
1913 IEMOP_MNEMONIC(syscall, "syscall");
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1916 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1917 }
1918 else
1919 {
 /* 286 LOADALL: also clobbers CR0 (MSW), hence the Cr0 register hint. */
1920 IEMOP_MNEMONIC(loadall286, "loadall286");
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1923 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1924 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1925 }
1926}
1927
1928
1929/** Opcode 0x0f 0x06. */
/** Opcode 0x0f 0x06 - CLTS.  Clears CR0.TS via C implementation; may cause
 *  a VM exit, hence IEM_CIMPL_F_VMEXIT. */
1930FNIEMOP_DEF(iemOp_clts)
1931{
1932 IEMOP_MNEMONIC(clts, "clts");
1933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1934 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1935}
1936
1937
1938/** Opcode 0x0f 0x07. */
/** Opcode 0x0f 0x07 - SYSRET.  The effective operand size selects the
 *  32-bit vs 64-bit return variant; ends the translation block. */
1939FNIEMOP_DEF(iemOp_sysret)
1940{
1941 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1945 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1946}
1947
1948
1949/** Opcode 0x0f 0x08. */
/** Opcode 0x0f 0x08 - INVD.  Privileged cache invalidate; 486+ only. */
1950FNIEMOP_DEF(iemOp_invd)
1951{
1952 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1953 IEMOP_HLP_MIN_486();
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1956}
1957
1958
1959/** Opcode 0x0f 0x09. */
/** Opcode 0x0f 0x09 - WBINVD.  Privileged write-back-and-invalidate; 486+. */
1960FNIEMOP_DEF(iemOp_wbinvd)
1961{
1962 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1963 IEMOP_HLP_MIN_486();
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1966}
1967
1968
1969/** Opcode 0x0f 0x0b. */
/** Opcode 0x0f 0x0b - UD2: the architecturally guaranteed invalid opcode. */
1970FNIEMOP_DEF(iemOp_ud2)
1971{
1972 IEMOP_MNEMONIC(ud2, "ud2");
1973 IEMOP_RAISE_INVALID_OPCODE_RET();
1974}
1975
1976/** Opcode 0x0f 0x0d. */
/** Opcode 0x0f 0x0d - AMD prefetch group (Grp P).  Treated as a NOP with a
 *  memory operand, matching Intel behaviour; \#UD when the CPU profile has
 *  neither long mode nor 3DNow! prefetch, or for the register form. */
1977FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1978{
1979 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1981 {
1982 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1983 IEMOP_RAISE_INVALID_OPCODE_RET();
1984 }
1985
1986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1987 if (IEM_IS_MODRM_REG_MODE(bRm))
1988 {
 /* Register form is invalid; only memory operands are prefetch hints. */
1989 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1990 IEMOP_RAISE_INVALID_OPCODE_RET();
1991 }
1992
 /* Pick the mnemonic by /reg; undefined encodings alias to /0 (prefetch). */
1993 switch (IEM_GET_MODRM_REG_8(bRm))
1994 {
1995 case 2: /* Aliased to /0 for the time being. */
1996 case 4: /* Aliased to /0 for the time being. */
1997 case 5: /* Aliased to /0 for the time being. */
1998 case 6: /* Aliased to /0 for the time being. */
1999 case 7: /* Aliased to /0 for the time being. */
2000 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2001 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2002 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2004 }
2005
2006 IEM_MC_BEGIN(0, 0);
2007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 /* Currently a NOP. */
2011 IEM_MC_NOREF(GCPtrEffSrc);
2012 IEM_MC_ADVANCE_RIP_AND_FINISH();
2013 IEM_MC_END();
2014}
2015
2016
2017/** Opcode 0x0f 0x0e. */
/** Opcode 0x0f 0x0e - FEMMS (AMD 3DNow!).  Fast exit from MMX state back to
 *  x87 mode; raises the usual device-not-available / FPU exceptions first. */
2018FNIEMOP_DEF(iemOp_femms)
2019{
2020 IEMOP_MNEMONIC(femms, "femms");
2021
2022 IEM_MC_BEGIN(0, 0);
2023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2027 IEM_MC_FPU_FROM_MMX_MODE();
2028 IEM_MC_ADVANCE_RIP_AND_FINISH();
2029 IEM_MC_END();
2030}
2031
2032
2033/** Opcode 0x0f 0x0f. */
/** Opcode 0x0f 0x0f - 3DNow! escape.  The actual operation is selected by a
 *  trailing immediate suffix byte, dispatched to iemOp_3DNowDispatcher.
 *  \#UD when the guest CPU profile lacks 3DNow!. */
2034FNIEMOP_DEF(iemOp_3Dnow)
2035{
2036 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2037 {
2038 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2039 IEMOP_RAISE_INVALID_OPCODE_RET();
2040 }
2041
2042#ifdef IEM_WITH_3DNOW
2043 /* This is pretty sparse, use switch instead of table. */
2044 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2045 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2046#else
2047 IEMOP_BITCH_ABOUT_STUB();
2048 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2049#endif
2050}
2051
2052
2053/**
2054 * @opcode 0x10
2055 * @oppfx none
2056 * @opcpuid sse
2057 * @opgroup og_sse_simdfp_datamove
2058 * @opxcpttype 4UA
2059 * @optest op1=1 op2=2 -> op1=2
2060 * @optest op1=0 op2=-22 -> op1=-22
2061 */
2062FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2063{
 /* Unaligned 128-bit load: register copy, or NO_AC memory fetch since
    MOVUPS tolerates unaligned addresses (no #GP alignment check). */
2064 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2066 if (IEM_IS_MODRM_REG_MODE(bRm))
2067 {
2068 /*
2069 * XMM128, XMM128.
2070 */
2071 IEM_MC_BEGIN(0, 0);
2072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2073 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2075 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2076 IEM_GET_MODRM_RM(pVCpu, bRm));
2077 IEM_MC_ADVANCE_RIP_AND_FINISH();
2078 IEM_MC_END();
2079 }
2080 else
2081 {
2082 /*
2083 * XMM128, [mem128].
2084 */
2085 IEM_MC_BEGIN(0, 0);
2086 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2088
2089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2093
2094 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2095 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2096
2097 IEM_MC_ADVANCE_RIP_AND_FINISH();
2098 IEM_MC_END();
2099 }
2100
2101}
2102
2103
2104/**
2105 * @opcode 0x10
2106 * @oppfx 0x66
2107 * @opcpuid sse2
2108 * @opgroup og_sse2_pcksclr_datamove
2109 * @opxcpttype 4UA
2110 * @optest op1=1 op2=2 -> op1=2
2111 * @optest op1=0 op2=-42 -> op1=-42
2112 */
2113FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2114{
 /* SSE2 twin of MOVUPS (66 prefix): unaligned 128-bit load, no alignment
    check on the memory form. */
2115 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if (IEM_IS_MODRM_REG_MODE(bRm))
2118 {
2119 /*
2120 * XMM128, XMM128.
2121 */
2122 IEM_MC_BEGIN(0, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2127 IEM_GET_MODRM_RM(pVCpu, bRm));
2128 IEM_MC_ADVANCE_RIP_AND_FINISH();
2129 IEM_MC_END();
2130 }
2131 else
2132 {
2133 /*
2134 * XMM128, [mem128].
2135 */
2136 IEM_MC_BEGIN(0, 0);
2137 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2139
2140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2144
2145 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2146 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2147
2148 IEM_MC_ADVANCE_RIP_AND_FINISH();
2149 IEM_MC_END();
2150 }
2151}
2152
2153
2154/**
2155 * @opcode 0x10
2156 * @oppfx 0xf3
2157 * @opcpuid sse
2158 * @opgroup og_sse_simdfp_datamove
2159 * @opxcpttype 5
2160 * @optest op1=1 op2=2 -> op1=2
2161 * @optest op1=0 op2=-22 -> op1=-22
2162 */
2163FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2164{
 /* MOVSS load: register form merges only dword 0 into the destination;
    memory form zero-extends the 32-bit value to the full 128 bits. */
2165 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2167 if (IEM_IS_MODRM_REG_MODE(bRm))
2168 {
2169 /*
2170 * XMM32, XMM32.
2171 */
2172 IEM_MC_BEGIN(0, 0);
2173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2174 IEM_MC_LOCAL(uint32_t, uSrc);
2175
2176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2178 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2179 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 else
2185 {
2186 /*
2187 * XMM128, [mem32].
2188 */
2189 IEM_MC_BEGIN(0, 0);
2190 IEM_MC_LOCAL(uint32_t, uSrc);
2191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2192
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2196 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2197
2198 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2199 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2200
2201 IEM_MC_ADVANCE_RIP_AND_FINISH();
2202 IEM_MC_END();
2203 }
2204}
2205
2206
2207/**
2208 * @opcode 0x10
2209 * @oppfx 0xf2
2210 * @opcpuid sse2
2211 * @opgroup og_sse2_pcksclr_datamove
2212 * @opxcpttype 5
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 */
2216FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2217{
 /* MOVSD (scalar double) load: register form merges only qword 0;
    memory form zero-extends the 64-bit value to the full 128 bits. */
2218 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 if (IEM_IS_MODRM_REG_MODE(bRm))
2221 {
2222 /*
2223 * XMM64, XMM64.
2224 */
2225 IEM_MC_BEGIN(0, 0);
2226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2227 IEM_MC_LOCAL(uint64_t, uSrc);
2228
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2232 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237 else
2238 {
2239 /*
2240 * XMM128, [mem64].
2241 */
2242 IEM_MC_BEGIN(0, 0);
2243 IEM_MC_LOCAL(uint64_t, uSrc);
2244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2245
2246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2248 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2250
2251 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2252 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2253
2254 IEM_MC_ADVANCE_RIP_AND_FINISH();
2255 IEM_MC_END();
2256 }
2257}
2258
2259
2260/**
2261 * @opcode 0x11
2262 * @oppfx none
2263 * @opcpuid sse
2264 * @opgroup og_sse_simdfp_datamove
2265 * @opxcpttype 4UA
2266 * @optest op1=1 op2=2 -> op1=2
2267 * @optest op1=0 op2=-42 -> op1=-42
2268 */
2269FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2270{
 /* MOVUPS store direction (MR form): unaligned 128-bit store, memory form
    only reads SSE state (ACTUALIZE_FOR_READ) since the XMM regs aren't
    modified. */
2271 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2273 if (IEM_IS_MODRM_REG_MODE(bRm))
2274 {
2275 /*
2276 * XMM128, XMM128.
2277 */
2278 IEM_MC_BEGIN(0, 0);
2279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2282 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2283 IEM_GET_MODRM_REG(pVCpu, bRm));
2284 IEM_MC_ADVANCE_RIP_AND_FINISH();
2285 IEM_MC_END();
2286 }
2287 else
2288 {
2289 /*
2290 * [mem128], XMM128.
2291 */
2292 IEM_MC_BEGIN(0, 0);
2293 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2295
2296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2300
2301 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2303
2304 IEM_MC_ADVANCE_RIP_AND_FINISH();
2305 IEM_MC_END();
2306 }
2307}
2308
2309
2310/**
2311 * @opcode 0x11
2312 * @oppfx 0x66
2313 * @opcpuid sse2
2314 * @opgroup og_sse2_pcksclr_datamove
2315 * @opxcpttype 4UA
2316 * @optest op1=1 op2=2 -> op1=2
2317 * @optest op1=0 op2=-42 -> op1=-42
2318 */
2319FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2320{
 /* MOVUPD store direction (MR form): SSE2 twin of MOVUPS store, unaligned
    128-bit store with no alignment check. */
2321 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2323 if (IEM_IS_MODRM_REG_MODE(bRm))
2324 {
2325 /*
2326 * XMM128, XMM128.
2327 */
2328 IEM_MC_BEGIN(0, 0);
2329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2333 IEM_GET_MODRM_REG(pVCpu, bRm));
2334 IEM_MC_ADVANCE_RIP_AND_FINISH();
2335 IEM_MC_END();
2336 }
2337 else
2338 {
2339 /*
2340 * [mem128], XMM128.
2341 */
2342 IEM_MC_BEGIN(0, 0);
2343 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2345
2346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2350
2351 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2352 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2353
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 }
2357}
2358
2359
2360/**
2361 * @opcode 0x11
2362 * @oppfx 0xf3
2363 * @opcpuid sse
2364 * @opgroup og_sse_simdfp_datamove
2365 * @opxcpttype 5
2366 * @optest op1=1 op2=2 -> op1=2
2367 * @optest op1=0 op2=-22 -> op1=-22
2368 */
2369FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2370{
 /* MOVSS store direction (MR form): register form merges dword 0 into the
    destination XMM; memory form stores 32 bits only. */
2371 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2373 if (IEM_IS_MODRM_REG_MODE(bRm))
2374 {
2375 /*
2376 * XMM32, XMM32.
2377 */
2378 IEM_MC_BEGIN(0, 0);
2379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2380 IEM_MC_LOCAL(uint32_t, uSrc);
2381
2382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2385 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390 else
2391 {
2392 /*
2393 * [mem32], XMM32.
2394 */
2395 IEM_MC_BEGIN(0, 0);
2396 IEM_MC_LOCAL(uint32_t, uSrc);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2403
2404 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2405 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2406
2407 IEM_MC_ADVANCE_RIP_AND_FINISH();
2408 IEM_MC_END();
2409 }
2410}
2411
2412
2413/**
2414 * @opcode 0x11
2415 * @oppfx 0xf2
2416 * @opcpuid sse2
2417 * @opgroup og_sse2_pcksclr_datamove
2418 * @opxcpttype 5
2419 * @optest op1=1 op2=2 -> op1=2
2420 * @optest op1=0 op2=-42 -> op1=-42
2421 */
2422FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2423{
 /* MOVSD store direction (MR form): register form merges qword 0 into the
    destination XMM; memory form stores 64 bits only. */
2424 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2426 if (IEM_IS_MODRM_REG_MODE(bRm))
2427 {
2428 /*
2429 * XMM64, XMM64.
2430 */
2431 IEM_MC_BEGIN(0, 0);
2432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2433 IEM_MC_LOCAL(uint64_t, uSrc);
2434
2435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2437 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2438 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443 else
2444 {
2445 /*
2446 * [mem64], XMM64.
2447 */
2448 IEM_MC_BEGIN(0, 0);
2449 IEM_MC_LOCAL(uint64_t, uSrc);
2450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2451
2452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2456
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2459
2460 IEM_MC_ADVANCE_RIP_AND_FINISH();
2461 IEM_MC_END();
2462 }
2463}
2464
2465
/** Opcode 0x0f 0x12, no prefix.  Register form decodes as MOVHLPS (high
 *  qword of source -> low qword of destination); memory form decodes as
 *  MOVLPS (64-bit load into the low qword, high qword preserved). */
2466FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2467{
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 if (IEM_IS_MODRM_REG_MODE(bRm))
2470 {
2471 /**
2472 * @opcode 0x12
2473 * @opcodesub 11 mr/reg
2474 * @oppfx none
2475 * @opcpuid sse
2476 * @opgroup og_sse_simdfp_datamove
2477 * @opxcpttype 5
2478 * @optest op1=1 op2=2 -> op1=2
2479 * @optest op1=0 op2=-42 -> op1=-42
2480 */
2481 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2482
2483 IEM_MC_BEGIN(0, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2489 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2490 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /**
2498 * @opdone
2499 * @opcode 0x12
2500 * @opcodesub !11 mr/reg
2501 * @oppfx none
2502 * @opcpuid sse
2503 * @opgroup og_sse_simdfp_datamove
2504 * @opxcpttype 5
2505 * @optest op1=1 op2=2 -> op1=2
2506 * @optest op1=0 op2=-42 -> op1=-42
2507 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2508 */
2509 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2510
2511 IEM_MC_BEGIN(0, 0);
2512 IEM_MC_LOCAL(uint64_t, uSrc);
2513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2514
2515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2519
2520 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2521 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2522
2523 IEM_MC_ADVANCE_RIP_AND_FINISH();
2524 IEM_MC_END();
2525 }
2526}
2527
2528
2529/**
2530 * @opcode 0x12
2531 * @opcodesub !11 mr/reg
2532 * @oppfx 0x66
2533 * @opcpuid sse2
2534 * @opgroup og_sse2_pcksclr_datamove
2535 * @opxcpttype 5
2536 * @optest op1=1 op2=2 -> op1=2
2537 * @optest op1=0 op2=-42 -> op1=-42
2538 */
2539FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2540{
 /* MOVLPD load: 64-bit memory load into the low qword, high qword kept.
    Unlike 0x12 without prefix, the 66-prefixed register form is \#UD. */
2541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2542 if (IEM_IS_MODRM_MEM_MODE(bRm))
2543 {
2544 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2545
2546 IEM_MC_BEGIN(0, 0);
2547 IEM_MC_LOCAL(uint64_t, uSrc);
2548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2549
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2552 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2554
2555 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2556 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2557
2558 IEM_MC_ADVANCE_RIP_AND_FINISH();
2559 IEM_MC_END();
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 else
2573 IEMOP_RAISE_INVALID_OPCODE_RET();
2574}
2575
2576
2577/**
2578 * @opcode 0x12
2579 * @oppfx 0xf3
2580 * @opcpuid sse3
2581 * @opgroup og_sse3_pcksclr_datamove
2582 * @opxcpttype 4
2583 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2584 * op1=0x00000002000000020000000100000001
2585 */
2586FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2587{
 /* MOVSLDUP: duplicate the even-indexed source dwords -
    dst[0]=dst[1]=src[0], dst[2]=dst[3]=src[2].  The memory form requires
    16-byte alignment (ALIGN_SSE fetch, exception type 4). */
2588 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2590 if (IEM_IS_MODRM_REG_MODE(bRm))
2591 {
2592 /*
2593 * XMM, XMM.
2594 */
2595 IEM_MC_BEGIN(0, 0);
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2597 IEM_MC_LOCAL(RTUINT128U, uSrc);
2598
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2606 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * XMM, [mem128].
2615 */
2616 IEM_MC_BEGIN(0, 0);
2617 IEM_MC_LOCAL(RTUINT128U, uSrc);
2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2627 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2628 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2629 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2630
2631 IEM_MC_ADVANCE_RIP_AND_FINISH();
2632 IEM_MC_END();
2633 }
2634}
2635
2636
2637/**
2638 * @opcode 0x12
2639 * @oppfx 0xf2
2640 * @opcpuid sse3
2641 * @opgroup og_sse3_pcksclr_datamove
2642 * @opxcpttype 5
2643 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2644 * op1=0x22222222111111112222222211111111
2645 */
2646FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2647{
 /* MOVDDUP: duplicate the low source qword into both destination qwords.
    Memory form reads only 64 bits, hence no alignment check (type 5). */
2648 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2650 if (IEM_IS_MODRM_REG_MODE(bRm))
2651 {
2652 /*
2653 * XMM128, XMM64.
2654 */
2655 IEM_MC_BEGIN(0, 0);
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2663 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2664 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP_AND_FINISH();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * XMM128, [mem64].
2673 */
2674 IEM_MC_BEGIN(0, 0);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_LOCAL(uint64_t, uSrc);
2677
2678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2684 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2685 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP_AND_FINISH();
2688 IEM_MC_END();
2689 }
2690}
2691
2692
2693/**
2694 * @opcode 0x13
2695 * @opcodesub !11 mr/reg
2696 * @oppfx none
2697 * @opcpuid sse
2698 * @opgroup og_sse_simdfp_datamove
2699 * @opxcpttype 5
2700 * @optest op1=1 op2=2 -> op1=2
2701 * @optest op1=0 op2=-42 -> op1=-42
2702 */
2703FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2704{
 /* MOVLPS store: write the low qword of the XMM register to memory.
    The register form of 0x13 is \#UD. */
2705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2706 if (IEM_IS_MODRM_MEM_MODE(bRm))
2707 {
2708 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2709
2710 IEM_MC_BEGIN(0, 0);
2711 IEM_MC_LOCAL(uint64_t, uSrc);
2712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2713
2714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2718
2719 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2720 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2721
2722 IEM_MC_ADVANCE_RIP_AND_FINISH();
2723 IEM_MC_END();
2724 }
2725
2726 /**
2727 * @opdone
2728 * @opmnemonic ud0f13m3
2729 * @opcode 0x13
2730 * @opcodesub 11 mr/reg
2731 * @oppfx none
2732 * @opunused immediate
2733 * @opcpuid sse
2734 * @optest ->
2735 */
2736 else
2737 IEMOP_RAISE_INVALID_OPCODE_RET();
2738}
2739
2740
2741/**
2742 * @opcode 0x13
2743 * @opcodesub !11 mr/reg
2744 * @oppfx 0x66
2745 * @opcpuid sse2
2746 * @opgroup og_sse2_pcksclr_datamove
2747 * @opxcpttype 5
2748 * @optest op1=1 op2=2 -> op1=2
2749 * @optest op1=0 op2=-42 -> op1=-42
2750 */
2751FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2752{
 /* MOVLPD store: SSE2 twin of MOVLPS store (66 prefix); register form is
    \#UD. */
2753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2754 if (IEM_IS_MODRM_MEM_MODE(bRm))
2755 {
2756 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2757
2758 IEM_MC_BEGIN(0, 0);
2759 IEM_MC_LOCAL(uint64_t, uSrc);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2766
2767 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2768 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2769
2770 IEM_MC_ADVANCE_RIP_AND_FINISH();
2771 IEM_MC_END();
2772 }
2773
2774 /**
2775 * @opdone
2776 * @opmnemonic ud660f13m3
2777 * @opcode 0x13
2778 * @opcodesub 11 mr/reg
2779 * @oppfx 0x66
2780 * @opunused immediate
2781 * @opcpuid sse
2782 * @optest ->
2783 */
2784 else
2785 IEMOP_RAISE_INVALID_OPCODE_RET();
2786}
2787
2788
2789/**
2790 * @opmnemonic udf30f13
2791 * @opcode 0x13
2792 * @oppfx 0xf3
2793 * @opunused intel-modrm
2794 * @opcpuid sse
2795 * @optest ->
2796 * @opdone
2797 */
2798
2799/**
2800 * @opmnemonic udf20f13
2801 * @opcode 0x13
2802 * @oppfx 0xf2
2803 * @opunused intel-modrm
2804 * @opcpuid sse
2805 * @optest ->
2806 * @opdone
2807 */
2808
2809/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2810FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2811{
2812 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
 /* Interleave the low halves of source and destination (SSE worker). */
2813 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2814}
2815
2816
2817/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2818FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2819{
2820 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
 /* Interleave the low halves of source and destination (SSE2 worker). */
2821 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2822}
2823
2824
2825/**
2826 * @opdone
2827 * @opmnemonic udf30f14
2828 * @opcode 0x14
2829 * @oppfx 0xf3
2830 * @opunused intel-modrm
2831 * @opcpuid sse
2832 * @optest ->
2833 * @opdone
2834 */
2835
2836/**
2837 * @opmnemonic udf20f14
2838 * @opcode 0x14
2839 * @oppfx 0xf2
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2847FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2848{
2849 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
 /* Interleave the high halves of source and destination (SSE worker). */
2850 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2851}
2852
2853
2854/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2855FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
 /* Interleave the high halves of source and destination (SSE2 worker). */
2858 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2859}
2860
2861
2862/* Opcode 0xf3 0x0f 0x15 - invalid */
2863/* Opcode 0xf2 0x0f 0x15 - invalid */
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f15
2868 * @opcode 0x15
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f15
2878 * @opcode 0x15
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
/** Opcode 0x0f 0x16, no prefix.  Register form decodes as MOVLHPS (low qword
 *  of source -> high qword of destination); memory form decodes as MOVHPS
 *  (64-bit load into the high qword, low qword preserved). */
2886FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2887{
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2889 if (IEM_IS_MODRM_REG_MODE(bRm))
2890 {
2891 /**
2892 * @opcode 0x16
2893 * @opcodesub 11 mr/reg
2894 * @oppfx none
2895 * @opcpuid sse
2896 * @opgroup og_sse_simdfp_datamove
2897 * @opxcpttype 5
2898 * @optest op1=1 op2=2 -> op1=2
2899 * @optest op1=0 op2=-42 -> op1=-42
2900 */
2901 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2902
2903 IEM_MC_BEGIN(0, 0);
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2905 IEM_MC_LOCAL(uint64_t, uSrc);
2906
2907 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2909 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2910 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2911
2912 IEM_MC_ADVANCE_RIP_AND_FINISH();
2913 IEM_MC_END();
2914 }
2915 else
2916 {
2917 /**
2918 * @opdone
2919 * @opcode 0x16
2920 * @opcodesub !11 mr/reg
2921 * @oppfx none
2922 * @opcpuid sse
2923 * @opgroup og_sse_simdfp_datamove
2924 * @opxcpttype 5
2925 * @optest op1=1 op2=2 -> op1=2
2926 * @optest op1=0 op2=-42 -> op1=-42
2927 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2928 */
2929 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2930
2931 IEM_MC_BEGIN(0, 0);
2932 IEM_MC_LOCAL(uint64_t, uSrc);
2933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2934
2935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2939
2940 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2941 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2942
2943 IEM_MC_ADVANCE_RIP_AND_FINISH();
2944 IEM_MC_END();
2945 }
2946}
2947
2948
2949/**
2950 * @opcode 0x16
2951 * @opcodesub !11 mr/reg
2952 * @oppfx 0x66
2953 * @opcpuid sse2
2954 * @opgroup og_sse2_pcksclr_datamove
2955 * @opxcpttype 5
2956 * @optest op1=1 op2=2 -> op1=2
2957 * @optest op1=0 op2=-42 -> op1=-42
2958 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    /* MOVHPD: load a 64-bit memory operand into the high quadword of the
       destination XMM register; the register (11 mr/reg) form is invalid. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Only the high qword of the destination is written; the low qword is preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2995
2996
2997/**
2998 * @opcode 0x16
2999 * @oppfx 0xf3
3000 * @opcpuid sse3
3001 * @opgroup og_sse3_pcksclr_datamove
3002 * @opxcpttype 4
3003 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3004 * op1=0x00000002000000020000000100000001
3005 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    /* MOVSHDUP: duplicate the odd-indexed (high) dwords of the source into
       both dwords of each pair in the destination: dst = {s1,s1,s3,s3}. */
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Fetch first: source and destination may be the same register. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (#GP otherwise), hence the _ALIGN_SSE fetch. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3055
3056/**
3057 * @opdone
3058 * @opmnemonic udf30f16
3059 * @opcode 0x16
3060 * @oppfx 0xf2
3061 * @opunused intel-modrm
3062 * @opcpuid sse
3063 * @optest ->
3064 * @opdone
3065 */
3066
3067
3068/**
3069 * @opcode 0x17
3070 * @opcodesub !11 mr/reg
3071 * @oppfx none
3072 * @opcpuid sse
3073 * @opgroup og_sse_simdfp_datamove
3074 * @opxcpttype 5
3075 * @optest op1=1 op2=2 -> op1=2
3076 * @optest op1=0 op2=-42 -> op1=-42
3077 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* MOVHPS (store form): write the high quadword of the source XMM register
       to a 64-bit memory operand; the register form is invalid. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Read-only access to the SSE state: only memory is modified. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3114
3115
3116/**
3117 * @opcode 0x17
3118 * @opcodesub !11 mr/reg
3119 * @oppfx 0x66
3120 * @opcpuid sse2
3121 * @opgroup og_sse2_pcksclr_datamove
3122 * @opxcpttype 5
3123 * @optest op1=1 op2=2 -> op1=2
3124 * @optest op1=0 op2=-42 -> op1=-42
3125 */
3126FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if (IEM_IS_MODRM_MEM_MODE(bRm))
3130 {
3131 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3132
3133 IEM_MC_BEGIN(0, 0);
3134 IEM_MC_LOCAL(uint64_t, uSrc);
3135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3136
3137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3139 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3141
3142 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3143 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3144
3145 IEM_MC_ADVANCE_RIP_AND_FINISH();
3146 IEM_MC_END();
3147 }
3148
3149 /**
3150 * @opdone
3151 * @opmnemonic ud660f17m3
3152 * @opcode 0x17
3153 * @opcodesub 11 mr/reg
3154 * @oppfx 0x66
3155 * @opunused immediate
3156 * @opcpuid sse
3157 * @optest ->
3158 */
3159 else
3160 IEMOP_RAISE_INVALID_OPCODE_RET();
3161}
3162
3163
3164/**
3165 * @opdone
3166 * @opmnemonic udf30f17
3167 * @opcode 0x17
3168 * @oppfx 0xf3
3169 * @opunused intel-modrm
3170 * @opcpuid sse
3171 * @optest ->
3172 * @opdone
3173 */
3174
3175/**
3176 * @opmnemonic udf20f17
3177 * @opcode 0x17
3178 * @oppfx 0xf2
3179 * @opunused intel-modrm
3180 * @opcpuid sse
3181 * @optest ->
3182 * @opdone
3183 */
3184
3185
3186/** Opcode 0x0f 0x18. */
/** Opcode 0x0f 0x18 - grp16 PREFETCHh hints; implemented as a NOP that still
 *  decodes the effective address (so faults on decoding stay accurate). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* The /r reg field selects the hint; only the mnemonic differs, the emulation is shared. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        IEM_MC_NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        /* Register forms of grp16 are undefined. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3217
3218
3219/** Opcode 0x0f 0x19..0x1f. */
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP Ev; the memory form still computes
 *  the effective address so address decoding behaves normally. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        IEM_MC_NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3243
3244
3245/** Opcode 0x0f 0x20. */
/** Opcode 0x0f 0x20 - mov Rd,Cd: read a control register into a GPR.
 *  Defers to iemCImpl_mov_Rd_Cd after validating the CR number. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
/** @todo testcase: check memory encoding. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3279
3280
3281/** Opcode 0x0f 0x21. */
/** Opcode 0x0f 0x21 - mov Rd,Dd: read a debug register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
/** @todo testcase: check memory encoding. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8-DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3295
3296
3297/** Opcode 0x0f 0x22. */
/** Opcode 0x0f 0x22 - mov Cd,Rd: write a GPR into a control register.
 *  Defers to iemCImpl_mov_Cd_Rd after validating the CR number. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
    /* CR2/CR8 writes cannot change the execution mode; CR0/CR3/CR4 writes can. */
    if (iCrReg & (2 | 8))
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
    else
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3334
3335
3336/** Opcode 0x0f 0x23. */
/** Opcode 0x0f 0x23 - mov Dd,Rd: write a GPR into a debug register. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8-DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
                                iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3348
3349
3350/** Opcode 0x0f 0x24. */
/** Opcode 0x0f 0x24 - mov Rd,Td: read a (legacy) test register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; newer targets raise #UD. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3362
3363
3364/** Opcode 0x0f 0x26. */
/** Opcode 0x0f 0x26 - mov Td,Rd: write a GPR into a (legacy) test register. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; newer targets raise #UD. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3375
3376
3377/**
3378 * @opcode 0x28
3379 * @oppfx none
3380 * @opcpuid sse
3381 * @opgroup og_sse_simdfp_datamove
3382 * @opxcpttype 1
3383 * @optest op1=1 op2=2 -> op1=2
3384 * @optest op1=0 op2=-42 -> op1=-42
3385 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* MOVAPS (load form): full 128-bit aligned move into an XMM register. */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned fetch: unaligned operands take #GP per the MOVAPS spec. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3425
3426/**
3427 * @opcode 0x28
3428 * @oppfx 66
3429 * @opcpuid sse2
3430 * @opgroup og_sse2_pcksclr_datamove
3431 * @opxcpttype 1
3432 * @optest op1=1 op2=2 -> op1=2
3433 * @optest op1=0 op2=-42 -> op1=-42
3434 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* MOVAPD (load form): same as MOVAPS but gated on SSE2. */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned fetch: unaligned operands take #GP per the MOVAPD spec. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3474
3475/* Opcode 0xf3 0x0f 0x28 - invalid */
3476/* Opcode 0xf2 0x0f 0x28 - invalid */
3477
3478/**
3479 * @opcode 0x29
3480 * @oppfx none
3481 * @opcpuid sse
3482 * @opgroup og_sse_simdfp_datamove
3483 * @opxcpttype 1
3484 * @optest op1=1 op2=2 -> op1=2
3485 * @optest op1=0 op2=-42 -> op1=-42
3486 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* MOVAPS (store form): full 128-bit aligned move out of an XMM register. */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only memory is written; the register state is merely read. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* MOVAPD (store form): same as MOVAPS store but gated on SSE2. */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only memory is written; the register state is merely read. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3575
3576/* Opcode 0xf3 0x0f 0x29 - invalid */
3577/* Opcode 0xf2 0x0f 0x29 - invalid */
3578
3579
3580/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3581FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3582{
3583 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 if (IEM_IS_MODRM_REG_MODE(bRm))
3586 {
3587 /*
3588 * XMM, MMX
3589 */
3590 IEM_MC_BEGIN(0, 0);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3592 IEM_MC_LOCAL(X86XMMREG, Dst);
3593 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3594 IEM_MC_ARG(uint64_t, u64Src, 1);
3595 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3597 IEM_MC_PREPARE_FPU_USAGE();
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3601 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3602
3603 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3604 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609 else
3610 {
3611 /*
3612 * XMM, [mem64]
3613 */
3614 IEM_MC_BEGIN(0, 0);
3615 IEM_MC_LOCAL(X86XMMREG, Dst);
3616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3617 IEM_MC_ARG(uint64_t, u64Src, 1);
3618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3619
3620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3623
3624 IEM_MC_PREPARE_FPU_USAGE();
3625
3626 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3627 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3628
3629 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3630 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635}
3636
3637
3638/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3639FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3640{
3641 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643 if (IEM_IS_MODRM_REG_MODE(bRm))
3644 {
3645 /*
3646 * XMM, MMX
3647 */
3648 IEM_MC_BEGIN(0, 0);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3650 IEM_MC_LOCAL(X86XMMREG, Dst);
3651 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3652 IEM_MC_ARG(uint64_t, u64Src, 1);
3653 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3654 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3655 IEM_MC_PREPARE_FPU_USAGE();
3656 IEM_MC_FPU_TO_MMX_MODE();
3657
3658 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3659
3660 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3661 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3662
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * XMM, [mem64]
3670 */
3671 IEM_MC_BEGIN(0, 0);
3672 IEM_MC_LOCAL(X86XMMREG, Dst);
3673 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3674 IEM_MC_ARG(uint64_t, u64Src, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3679 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3680 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3681 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3682
3683 /* Doesn't cause a transition to MMX mode. */
3684 IEM_MC_PREPARE_SSE_USAGE();
3685
3686 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3688
3689 IEM_MC_ADVANCE_RIP_AND_FINISH();
3690 IEM_MC_END();
3691 }
3692}
3693
3694
3695/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    /* CVTSI2SS: convert a signed 32/64-bit integer (GPR or memory) into a
       scalar single-precision float stored in the low dword of the XMM
       destination.  REX.W selects the 64-bit source forms. */
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG(const int64_t *, pi64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG(const int32_t *, pi32Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3790
3791
3792/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    /* CVTSI2SD: convert a signed 32/64-bit integer (GPR or memory) into a
       scalar double-precision float stored in the low qword of the XMM
       destination.  REX.W selects the 64-bit source forms. */
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG(const int64_t *, pi64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG(const int32_t *, pi32Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3887
3888
3889/**
3890 * @opcode 0x2b
3891 * @opcodesub !11 mr/reg
3892 * @oppfx none
3893 * @opcpuid sse
3894 * @opgroup og_sse1_cachect
3895 * @opxcpttype 1
3896 * @optest op1=1 op2=2 -> op1=2
3897 * @optest op1=0 op2=-42 -> op1=-42
3898 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    /* MOVNTPS: non-temporal 128-bit aligned store from an XMM register.
       The non-temporal cache hint itself is not modelled here. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3927
/**
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         *
         * Same body as movntps except for the SSE2 CPUID gate; the
         * non-temporal hint is not modelled, so this is a plain aligned
         * 128-bit store.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* #GP on misalignment, per xcpt type 1. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3966/* Opcode 0xf3 0x0f 0x2b - invalid */
3967/* Opcode 0xf2 0x0f 0x2b - invalid */
3968
3969
3970/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3971FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3972{
3973 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3975 if (IEM_IS_MODRM_REG_MODE(bRm))
3976 {
3977 /*
3978 * Register, register.
3979 */
3980 IEM_MC_BEGIN(0, 0);
3981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3982 IEM_MC_LOCAL(uint64_t, u64Dst);
3983 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3984 IEM_MC_ARG(uint64_t, u64Src, 1);
3985 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3986 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3987 IEM_MC_PREPARE_FPU_USAGE();
3988 IEM_MC_FPU_TO_MMX_MODE();
3989
3990 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3991
3992 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3993 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3994
3995 IEM_MC_ADVANCE_RIP_AND_FINISH();
3996 IEM_MC_END();
3997 }
3998 else
3999 {
4000 /*
4001 * Register, memory.
4002 */
4003 IEM_MC_BEGIN(0, 0);
4004 IEM_MC_LOCAL(uint64_t, u64Dst);
4005 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4006 IEM_MC_ARG(uint64_t, u64Src, 1);
4007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4008
4009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4011 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4013 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4014
4015 IEM_MC_PREPARE_FPU_USAGE();
4016 IEM_MC_FPU_TO_MMX_MODE();
4017
4018 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
4019 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4020
4021 IEM_MC_ADVANCE_RIP_AND_FINISH();
4022 IEM_MC_END();
4023 }
4024}
4025
4026
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd
 *
 * Converts (with truncation) both packed doubles of the source to two signed
 * dwords in the 64-bit MMX destination.  SSE2 form, hence the full-XMM source
 * reference and the fSse2 CPUID gate.
 */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 1);                /* whole XMM register; both doubles are converted */
        IEM_MC_MAYBE_RAISE_FPU_XCPT();                   /* MMX destination => pending x87 exceptions fault first */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();                        /* destination is an MMX register */

        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Fetch (16-byte aligned) before switching to MMX mode so a memory
           fault leaves the FPU state untouched. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4083
4084
4085/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4086FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4087{
4088 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4089
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4092 {
4093 if (IEM_IS_MODRM_REG_MODE(bRm))
4094 {
4095 /* greg64, XMM */
4096 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4097 IEM_MC_LOCAL(int64_t, i64Dst);
4098 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4099 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4100
4101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4103 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4104
4105 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4106 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4107 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4108
4109 IEM_MC_ADVANCE_RIP_AND_FINISH();
4110 IEM_MC_END();
4111 }
4112 else
4113 {
4114 /* greg64, [mem64] */
4115 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4117 IEM_MC_LOCAL(int64_t, i64Dst);
4118 IEM_MC_LOCAL(uint32_t, u32Src);
4119 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4120 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4121
4122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4125 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4126
4127 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4128 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4129 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4130
4131 IEM_MC_ADVANCE_RIP_AND_FINISH();
4132 IEM_MC_END();
4133 }
4134 }
4135 else
4136 {
4137 if (IEM_IS_MODRM_REG_MODE(bRm))
4138 {
4139 /* greg, XMM */
4140 IEM_MC_BEGIN(0, 0);
4141 IEM_MC_LOCAL(int32_t, i32Dst);
4142 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4143 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4144
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4146 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4147 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4148
4149 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4150 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4151 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4152
4153 IEM_MC_ADVANCE_RIP_AND_FINISH();
4154 IEM_MC_END();
4155 }
4156 else
4157 {
4158 /* greg, [mem] */
4159 IEM_MC_BEGIN(0, 0);
4160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4161 IEM_MC_LOCAL(int32_t, i32Dst);
4162 IEM_MC_LOCAL(uint32_t, u32Src);
4163 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4164 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4165
4166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4169 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4170
4171 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4172 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4173 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4174
4175 IEM_MC_ADVANCE_RIP_AND_FINISH();
4176 IEM_MC_END();
4177 }
4178 }
4179}
4180
4181
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd
 *
 * Converts (with truncation) a scalar double to a signed 32- or 64-bit GPR;
 * REX.W selects the 64-bit destination form.
 */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] - scalar source, so no alignment restriction. */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] - the source operand is always a 64-bit double. */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4277
4278
4279/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4280FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4281{
4282 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4284 if (IEM_IS_MODRM_REG_MODE(bRm))
4285 {
4286 /*
4287 * Register, register.
4288 */
4289 IEM_MC_BEGIN(0, 0);
4290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4291 IEM_MC_LOCAL(uint64_t, u64Dst);
4292 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4293 IEM_MC_ARG(uint64_t, u64Src, 1);
4294
4295 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4297 IEM_MC_PREPARE_FPU_USAGE();
4298 IEM_MC_FPU_TO_MMX_MODE();
4299
4300 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4301
4302 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4303 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4304
4305 IEM_MC_ADVANCE_RIP_AND_FINISH();
4306 IEM_MC_END();
4307 }
4308 else
4309 {
4310 /*
4311 * Register, memory.
4312 */
4313 IEM_MC_BEGIN(0, 0);
4314 IEM_MC_LOCAL(uint64_t, u64Dst);
4315 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4316 IEM_MC_ARG(uint64_t, u64Src, 1);
4317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4318
4319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4321 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4323 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4324
4325 IEM_MC_PREPARE_FPU_USAGE();
4326 IEM_MC_FPU_TO_MMX_MODE();
4327
4328 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4329 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4330
4331 IEM_MC_ADVANCE_RIP_AND_FINISH();
4332 IEM_MC_END();
4333 }
4334}
4335
4336
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd
 *
 * Converts (rounding per MXCSR.RC) both packed doubles of the source to two
 * signed dwords in the 64-bit MMX destination.
 *
 * NOTE(review): the function name says 'Qpi' but the destination is an MMX
 * register (Ppi, like the sibling cvttpd2pi) -- name kept for ABI/table
 * stability; verify before renaming.
 */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 1);                /* whole XMM register; both doubles are converted */

        IEM_MC_MAYBE_RAISE_FPU_XCPT();                   /* MMX destination => pending x87 exceptions fault first */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();                        /* destination is an MMX register */

        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Fetch (16-byte aligned) before switching to MMX mode so a memory
           fault leaves the FPU state untouched. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4394
4395
4396/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4397FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4398{
4399 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4400
4401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4402 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4403 {
4404 if (IEM_IS_MODRM_REG_MODE(bRm))
4405 {
4406 /* greg64, XMM */
4407 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4408 IEM_MC_LOCAL(int64_t, i64Dst);
4409 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4410 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4411
4412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4414 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4415
4416 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4417 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4418 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4419
4420 IEM_MC_ADVANCE_RIP_AND_FINISH();
4421 IEM_MC_END();
4422 }
4423 else
4424 {
4425 /* greg64, [mem64] */
4426 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4428 IEM_MC_LOCAL(int64_t, i64Dst);
4429 IEM_MC_LOCAL(uint32_t, u32Src);
4430 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4431 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4432
4433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4436 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4437
4438 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4439 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4440 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4441
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 }
4445 }
4446 else
4447 {
4448 if (IEM_IS_MODRM_REG_MODE(bRm))
4449 {
4450 /* greg, XMM */
4451 IEM_MC_BEGIN(0, 0);
4452 IEM_MC_LOCAL(int32_t, i32Dst);
4453 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4454 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4455
4456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4457 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4458 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4459
4460 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4461 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4462 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4463
4464 IEM_MC_ADVANCE_RIP_AND_FINISH();
4465 IEM_MC_END();
4466 }
4467 else
4468 {
4469 /* greg, [mem] */
4470 IEM_MC_BEGIN(0, 0);
4471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4472 IEM_MC_LOCAL(int32_t, i32Dst);
4473 IEM_MC_LOCAL(uint32_t, u32Src);
4474 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4475 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4476
4477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4480 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4481
4482 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4483 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4484 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4485
4486 IEM_MC_ADVANCE_RIP_AND_FINISH();
4487 IEM_MC_END();
4488 }
4489 }
4490}
4491
4492
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd
 *
 * Converts (rounding per MXCSR.RC) a scalar double to a signed 32- or 64-bit
 * GPR; REX.W selects the 64-bit destination form.
 */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] - scalar source, so no alignment restriction. */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] - the source operand is always a 64-bit double. */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4588
4589
/**
 * @opcode      0x2e
 * @oppfx       none
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   af,sf,of
 *
 * Unordered compare of the low singles of Vss and Wss, reflecting the result
 * in ZF/PF/CF (AF/SF/OF cleared by the worker).
 */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Current flags are fetched, modified by the worker, then committed back. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* scalar access, no alignment check */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4649
4650
/**
 * @opcode      0x2e
 * @oppfx       0x66
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   af,sf,of
 *
 * Unordered compare of the low doubles of Vsd and Wsd, reflecting the result
 * in ZF/PF/CF (AF/SF/OF cleared by the worker).
 */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Current flags are fetched, modified by the worker, then committed back. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* scalar access, no alignment check */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4710
4711
4712/* Opcode 0xf3 0x0f 0x2e - invalid */
4713/* Opcode 0xf2 0x0f 0x2e - invalid */
4714
4715
/**
 * @opcode      0x2f
 * @oppfx       none
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   af,sf,of
 *
 * Ordered compare of the low singles of Vss and Wss.  (The @opcode tag
 * previously said 0x2e, which is ucomiss; comiss is 0x0f 0x2f.)
 */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Current flags are fetched, modified by the worker, then committed back. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* scalar access, no alignment check */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4775
4776
/**
 * @opcode      0x2f
 * @oppfx       0x66
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   af,sf,of
 *
 * Ordered compare of the low doubles of Vsd and Wsd.
 */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Current flags are fetched, modified by the worker, then committed back. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
        IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
        IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* scalar access, no alignment check */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
        IEM_MC_COMMIT_EFLAGS(fEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4836
4837
4838/* Opcode 0xf3 0x0f 0x2f - invalid */
4839/* Opcode 0xf2 0x0f 0x2f - invalid */
4840
/** Opcode 0x0f 0x30. WRMSR - deferred to C implementation; may VM-exit. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
}
4848
4849
/** Opcode 0x0f 0x31. RDTSC - deferred to C implementation; clobbers EAX:EDX. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdtsc);
}
4860
4861
/** Opcode 0x0f 0x32. RDMSR - deferred to C implementation; clobbers EAX:EDX.
 * @note The previous comment said 0x0f 0x33, but RDMSR is 0x0f 0x32 per the
 *       Intel/AMD opcode maps (0x33 is RDPMC). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdmsr);
}
4872
4873
/** Opcode 0x0f 0x33. RDPMC - deferred to C implementation; clobbers EAX:EDX.
 * @note The previous comment said 0x0f 0x34, but RDPMC is 0x0f 0x33 per the
 *       Intel/AMD opcode maps (0x34 is SYSENTER). */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdpmc);
}
4884
4885
/** Opcode 0x0f 0x34. SYSENTER - far branch; ends the translation block. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_sysenter);
}
4895
/** Opcode 0x0f 0x35. SYSEXIT - far branch; operand size selects 32/64-bit form. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
4905
/** Opcode 0x0f 0x37. GETSEC - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_getsec);
4908
4909
/** Opcode 0x0f 0x38. Three-byte escape: dispatches on the next opcode byte and
 *  the mandatory prefix (idxPrefix selects one of 4 columns per opcode). */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
4921
4922
/** Opcode 0x0f 0x3a. Three-byte escape: dispatches on the next opcode byte and
 *  the mandatory prefix (idxPrefix selects one of 4 columns per opcode). */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
4934
4935
/**
 * Implements a conditional move (CMOVcc Gv,Ev) for all three operand sizes and
 * both register and memory source forms.
 *
 * The 32-bit variants clear the high half of the destination GPR even when the
 * condition is false (64-bit mode zero-extension rule), hence the extra
 * IEM_MC_ELSE branch there.  The memory source is always fetched regardless of
 * the condition.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation (an IEM_MC_IF_* macro).
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
5042
5043
5044
/**
 * @opcode 0x40
 * @opfltest of
 */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    /* CMOVO: move if OF=1. */
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/**
 * @opcode 0x41
 * @opfltest of
 */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    /* CMOVNO: move if OF=0. */
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/**
 * @opcode 0x42
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    /* CMOVC/CMOVB: move if CF=1. */
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/**
 * @opcode 0x43
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    /* CMOVNC/CMOVAE: move if CF=0. */
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/**
 * @opcode 0x44
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    /* CMOVE/CMOVZ: move if ZF=1. */
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/**
 * @opcode 0x45
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    /* CMOVNE/CMOVNZ: move if ZF=0. */
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/**
 * @opcode 0x46
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    /* CMOVBE: move if CF=1 or ZF=1. */
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/**
 * @opcode 0x47
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    /* CMOVNBE/CMOVA: move if CF=0 and ZF=0. */
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
5131
5132
/**
 * @opcode 0x48
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    /* CMOVS: move if SF=1. */
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/**
 * @opcode 0x49
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    /* CMOVNS: move if SF=0. */
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/**
 * @opcode 0x4a
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    /* CMOVP/CMOVPE: move if PF=1. */
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/**
 * @opcode 0x4b
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    /* CMOVNP/CMOVPO: move if PF=0. */
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/**
 * @opcode 0x4c
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    /* CMOVL/CMOVNGE: move if SF != OF. */
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/**
 * @opcode 0x4d
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    /* CMOVNL/CMOVGE: move if SF == OF. */
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
5197
5198
/**
 * @opcode 0x4e
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    /* CMOVLE/CMOVNG: move if ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5208
5209
/**
 * @opcode 0x4f
 * @opfltest zf,sf,of
 * @note The @opcode tag previously (and incorrectly) repeated 0x4e;
 *       CMOVNLE/CMOVG is 0x0f 0x4f per the Intel/AMD opcode maps.
 */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    /* CMOVNLE/CMOVG: move if ZF=0 and SF == OF. */
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5219
5220#undef CMOV_X
5221
/** Opcode 0x0f 0x50 - movmskps Gy, Ups
 * Extracts the sign bits of the four packed singles into a GPR.
 * Register form only; the memory form is an invalid encoding. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        /* 32-bit store zero-extends the 4-bit mask into the destination GPR. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
5249
5250
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd
 * Extracts the sign bits of the two packed doubles into a GPR.
 * Register form only; the memory form is an invalid encoding. */
FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
        /* 32-bit store zero-extends the 2-bit mask into the destination GPR. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();

}
5279
5280
5281/* Opcode 0xf3 0x0f 0x50 - invalid */
5282/* Opcode 0xf2 0x0f 0x50 - invalid */
5283
5284
/** Opcode 0x0f 0x51 - sqrtps Vps, Wps (SSE; full 128-bit packed-single body) */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd (SSE2; full 128-bit packed-double body) */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss (SSE; scalar single, low 32 bits) */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd (SSE2; scalar double, low 64 bits) */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
5315
5316
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps (SSE approximate reciprocal sqrt, packed) */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss (SSE approximate reciprocal sqrt, scalar) */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x52 - invalid */


/** Opcode 0x0f 0x53 - rcpps Vps, Wps (SSE approximate reciprocal, packed) */
FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
}


/* Opcode 0x66 0x0f 0x53 - invalid */


/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss (SSE approximate reciprocal, scalar) */
FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
}
5356
5357
5358/* Opcode 0xf2 0x0f 0x53 - invalid */
5359
5360
/** Opcode 0x0f 0x54 - andps Vps, Wps
 * @note Bitwise op, so it shares the pand implementation with 0x66 0x0f 0xdb. */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd
 * @note Bitwise op, so it shares the pand implementation with andps. */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps (bitwise AND-NOT; shares pandn impl) */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd (bitwise AND-NOT; shares pandn impl) */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}
5395
5396
5397/* Opcode 0xf3 0x0f 0x55 - invalid */
5398/* Opcode 0xf2 0x0f 0x55 - invalid */
5399
5400
/** Opcode 0x0f 0x56 - orps Vps, Wps (bitwise OR; shares the por implementation) */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd (bitwise OR; shares the por implementation) */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps (bitwise XOR; shares the pxor implementation) */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd (bitwise XOR; shares the pxor implementation) */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5435
5436
5437/* Opcode 0xf3 0x0f 0x57 - invalid */
5438/* Opcode 0xf2 0x0f 0x57 - invalid */
5439
/** Opcode 0x0f 0x58 - addps Vps, Wps (SSE packed-single add) */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd (SSE2 packed-double add) */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(addpd, iemAImpl_addpd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss (SSE scalar-single add) */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd (SSE2 scalar-double add) */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps (SSE packed-single multiply) */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd (SSE2 packed-double multiply) */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss (SSE scalar-single multiply) */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd (SSE2 scalar-double multiply) */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5502
5503
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps
 * Converts the two low packed singles to two packed doubles. */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM[63:0].
         */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
        IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_LOCAL(uint64_t, u64Src);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5554
5555
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd (two doubles -> two singles) */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss (scalar single -> double) */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd (scalar double -> single) */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq (packed int32 -> single) */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps (packed single -> int32, rounded) */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps (packed single -> int32, truncated) */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /** @todo inefficient as we don't need to fetch the destination (write-only). */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}
5606
5607
5608/* Opcode 0xf2 0x0f 0x5b - invalid */
5609
5610
/** Opcode 0x0f 0x5c - subps Vps, Wps (SSE packed-single subtract) */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(subps, iemAImpl_subps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd (SSE2 packed-double subtract) */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss (SSE scalar-single subtract) */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd (SSE2 scalar-double subtract) */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}
5641
5642
/** Opcode 0x0f 0x5d - minps Vps, Wps (SSE packed-single minimum) */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd (SSE2 packed-double minimum) */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss (SSE scalar-single minimum) */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd (SSE2 scalar-double minimum) */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5673
5674
/** Opcode 0x0f 0x5e - divps Vps, Wps (SSE packed-single divide) */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd (SSE2 packed-double divide) */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss (SSE scalar-single divide) */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd (SSE2 scalar-double divide) */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}
5705
5706
/** Opcode 0x0f 0x5f - maxps Vps, Wps (SSE packed-single maximum) */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd (SSE2 packed-double maximum) */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss (SSE scalar-single maximum) */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd (SSE2 scalar-double maximum) */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5737
5738
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd (MMX byte interleave, low halves) */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx (SSE2 byte interleave, low halves)
 * @note Fixed comment typo: second operand was written "W" instead of "Wx". */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd (MMX word interleave, low halves) */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx (SSE2 word interleave, low halves) */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd (MMX dword interleave, low halves) */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx (SSE2 dword interleave, low halves) */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
5792
5793
5794/* Opcode 0xf3 0x0f 0x62 - invalid */
5795
5796
5797
/** Opcode 0x0f 0x63 - packsswb Pq, Qq (MMX: pack words to signed-saturated bytes) */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx (SSE2: pack words to signed-saturated bytes) */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */


/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq (MMX: packed signed byte greater-than compare) */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx (SSE2: packed signed byte greater-than compare) */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}
5831
5832
5833/* Opcode 0xf3 0x0f 0x64 - invalid */
5834
5835
5836/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5837FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5838{
5839 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5840 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5841}
5842
5843
5844/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5845FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5846{
5847 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5848 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5849}
5850
5851
5852/* Opcode 0xf3 0x0f 0x65 - invalid */
5853
5854
/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq
 * Dword-wise signed greater-than compare, producing element masks (MMX). */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}
5861
5862
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx
 * SSE2 variant: dword-wise signed greater-than compare to element masks. */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
5869
5870
5871/* Opcode 0xf3 0x0f 0x66 - invalid */
5872
5873
/** Opcode 0x0f 0x67 - packuswb Pq, Qq
 * Packs signed words into unsigned-saturated bytes (MMX).
 * @note Annotated Qd here, but the worker loads a full qword (cf. the Qd/Qq
 *       note on punpckhbw below). */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}
5880
5881
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx
 * SSE2 variant: packs signed words into unsigned-saturated bytes; uses the
 * macro body so native (recompiled) emitters can be used on AMD64/ARM64. */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5888
5889
5890/* Opcode 0xf3 0x0f 0x67 - invalid */
5891
5892
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * Interleaves the high bytes of the two operands (MMX).
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
5902
5903
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx
 * SSE2 variant: interleaves the high bytes of the two 128-bit operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
5910
5911
5912/* Opcode 0xf3 0x0f 0x68 - invalid */
5913
5914
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * Interleaves the high words of the two operands (MMX).
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
5924
5925
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * SSE2 variant: interleaves the high words of the two 128-bit operands.
 * (The old "Vx, Hx, Wx" form in this comment was a VEX-style leftover.) */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
5933
5934
5935/* Opcode 0xf3 0x0f 0x69 - invalid */
5936
5937
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * Interleaves the high dwords of the two operands (MMX).
 * @note Intel and AMD both use Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and Intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
5947
5948
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx
 * SSE2 variant: interleaves the high dwords of the two 128-bit operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
5955
5956
5957/* Opcode 0xf3 0x0f 0x6a - invalid */
5958
5959
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * Packs signed dwords into signed-saturated words (MMX). */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
5966
5967
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx
 * SSE2 variant: packs signed dwords into signed-saturated words. */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
5974
5975
5976/* Opcode 0xf3 0x0f 0x6b - invalid */
5977
5978
5979/* Opcode 0x0f 0x6c - invalid */
5980
5981
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx
 * Interleaves the low qwords of the two 128-bit operands (SSE2 only; no MMX form). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
5988
5989
5990/* Opcode 0xf3 0x0f 0x6c - invalid */
5991/* Opcode 0xf2 0x0f 0x6c - invalid */
5992
5993
5994/* Opcode 0x0f 0x6d - invalid */
5995
5996
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx
 * Interleaves the high qwords of the two 128-bit operands (SSE2 only; no MMX form). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6003
6004
6005/* Opcode 0xf3 0x0f 0x6d - invalid */
6006
6007
/**
 * Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq (no prefix): loads a GPR or memory
 * dword/qword into an MMX register, REX.W selecting the 64-bit (movq) form.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            /* NOTE(review): in the memory path the MMX-mode switch happens
               after the fetch, presumably so a #PF on the access leaves the
               x87 tag word untouched - confirm against the register path. */
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 - dword zero-extended into the 64-bit MMX register. */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6114
/**
 * Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq: loads a GPR or memory
 * dword/qword into the low part of an XMM register with zero extension to
 * 128 bits; REX.W selects the 64-bit (movq) form.
 */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6217
6218/* Opcode 0xf3 0x0f 0x6e - invalid */
6219
6220
6221/**
6222 * @opcode 0x6f
6223 * @oppfx none
6224 * @opcpuid mmx
6225 * @opgroup og_mmx_datamove
6226 * @opxcpttype 5
6227 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6228 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6229 */
6230FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6231{
6232 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6234 if (IEM_IS_MODRM_REG_MODE(bRm))
6235 {
6236 /*
6237 * Register, register.
6238 */
6239 IEM_MC_BEGIN(0, 0);
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6241 IEM_MC_LOCAL(uint64_t, u64Tmp);
6242
6243 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6244 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6245 IEM_MC_FPU_TO_MMX_MODE();
6246
6247 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6248 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6249
6250 IEM_MC_ADVANCE_RIP_AND_FINISH();
6251 IEM_MC_END();
6252 }
6253 else
6254 {
6255 /*
6256 * Register, memory.
6257 */
6258 IEM_MC_BEGIN(0, 0);
6259 IEM_MC_LOCAL(uint64_t, u64Tmp);
6260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6261
6262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6264 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6266
6267 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6271
6272 IEM_MC_ADVANCE_RIP_AND_FINISH();
6273 IEM_MC_END();
6274 }
6275}
6276
6277/**
6278 * @opcode 0x6f
6279 * @oppfx 0x66
6280 * @opcpuid sse2
6281 * @opgroup og_sse2_simdint_datamove
6282 * @opxcpttype 1
6283 * @optest op1=1 op2=2 -> op1=2
6284 * @optest op1=0 op2=-42 -> op1=-42
6285 */
6286FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6287{
6288 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6290 if (IEM_IS_MODRM_REG_MODE(bRm))
6291 {
6292 /*
6293 * Register, register.
6294 */
6295 IEM_MC_BEGIN(0, 0);
6296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6297
6298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6300
6301 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6302 IEM_GET_MODRM_RM(pVCpu, bRm));
6303 IEM_MC_ADVANCE_RIP_AND_FINISH();
6304 IEM_MC_END();
6305 }
6306 else
6307 {
6308 /*
6309 * Register, memory.
6310 */
6311 IEM_MC_BEGIN(0, 0);
6312 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6314
6315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6319
6320 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6321 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6322
6323 IEM_MC_ADVANCE_RIP_AND_FINISH();
6324 IEM_MC_END();
6325 }
6326}
6327
6328/**
6329 * @opcode 0x6f
6330 * @oppfx 0xf3
6331 * @opcpuid sse2
6332 * @opgroup og_sse2_simdint_datamove
6333 * @opxcpttype 4UA
6334 * @optest op1=1 op2=2 -> op1=2
6335 * @optest op1=0 op2=-42 -> op1=-42
6336 */
6337FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6338{
6339 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6341 if (IEM_IS_MODRM_REG_MODE(bRm))
6342 {
6343 /*
6344 * Register, register.
6345 */
6346 IEM_MC_BEGIN(0, 0);
6347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6350 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6351 IEM_GET_MODRM_RM(pVCpu, bRm));
6352 IEM_MC_ADVANCE_RIP_AND_FINISH();
6353 IEM_MC_END();
6354 }
6355 else
6356 {
6357 /*
6358 * Register, memory.
6359 */
6360 IEM_MC_BEGIN(0, 0);
6361 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6363
6364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6366 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6367 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6368 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6369 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6370
6371 IEM_MC_ADVANCE_RIP_AND_FINISH();
6372 IEM_MC_END();
6373 }
6374}
6375
6376
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * Shuffles the four words of the source per the imm8 selector into the
 * destination MMX register.  Requires SSE or the AMD MMX extensions
 * (see the _EX_2_OR decode helper below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* cbImmAndRspOffset=1: the imm8 follows the modrm bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6434
6435
6436/**
6437 * Common worker for SSE2 instructions on the forms:
6438 * pshufd xmm1, xmm2/mem128, imm8
6439 * pshufhw xmm1, xmm2/mem128, imm8
6440 * pshuflw xmm1, xmm2/mem128, imm8
6441 *
6442 * Proper alignment of the 128-bit operand is enforced.
6443 * Exceptions type 4. SSE2 cpuid checks.
6444 */
6445FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6446{
6447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6448 if (IEM_IS_MODRM_REG_MODE(bRm))
6449 {
6450 /*
6451 * Register, register.
6452 */
6453 IEM_MC_BEGIN(0, 0);
6454 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6456 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6457 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6458 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6459 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6460 IEM_MC_PREPARE_SSE_USAGE();
6461 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6462 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6463 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6464 IEM_MC_ADVANCE_RIP_AND_FINISH();
6465 IEM_MC_END();
6466 }
6467 else
6468 {
6469 /*
6470 * Register, memory.
6471 */
6472 IEM_MC_BEGIN(0, 0);
6473 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6474 IEM_MC_LOCAL(RTUINT128U, uSrc);
6475 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6477
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6479 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6480 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6483
6484 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6485 IEM_MC_PREPARE_SSE_USAGE();
6486 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6487 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6488
6489 IEM_MC_ADVANCE_RIP_AND_FINISH();
6490 IEM_MC_END();
6491 }
6492}
6493
6494
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib
 * Shuffles all four dwords of the source per imm8. */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6501
6502
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib
 * Shuffles the high four words per imm8; low qword copied through. */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6509
6510
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib
 * Shuffles the low four words per imm8; high qword copied through. */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6517
6518
6519/**
6520 * Common worker for MMX instructions of the form:
6521 * psrlw mm, imm8
6522 * psraw mm, imm8
6523 * psllw mm, imm8
6524 * psrld mm, imm8
6525 * psrad mm, imm8
6526 * pslld mm, imm8
6527 * psrlq mm, imm8
6528 * psllq mm, imm8
6529 *
6530 */
6531FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6532{
6533 if (IEM_IS_MODRM_REG_MODE(bRm))
6534 {
6535 /*
6536 * Register, immediate.
6537 */
6538 IEM_MC_BEGIN(0, 0);
6539 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6541 IEM_MC_ARG(uint64_t *, pDst, 0);
6542 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6543 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6544 IEM_MC_PREPARE_FPU_USAGE();
6545 IEM_MC_FPU_TO_MMX_MODE();
6546
6547 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6548 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6549 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6550
6551 IEM_MC_ADVANCE_RIP_AND_FINISH();
6552 IEM_MC_END();
6553 }
6554 else
6555 {
6556 /*
6557 * Register, memory not supported.
6558 */
6559 /// @todo Caller already enforced register mode?!
6560 AssertFailedReturn(VINF_SUCCESS);
6561 }
6562}
6563
6564
#if 0 /*unused*/
/**
 * Common worker for SSE2 instructions of the form:
 *      psrlw       xmm, imm8
 *      psraw       xmm, imm8
 *      psllw       xmm, imm8
 *      psrld       xmm, imm8
 *      psrad       xmm, imm8
 *      pslld       xmm, imm8
 *      psrlq       xmm, imm8
 *      psllq       xmm, imm8
 *
 * Superseded by the SSE2_SHIFT_BODY_Imm macro below (which adds native
 * recompiler emitter support); kept for reference.
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
#endif
6607
6608
6609/**
6610 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6611 */
6612#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6613 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6614 { \
6615 /* \
6616 * Register, immediate. \
6617 */ \
6618 IEM_MC_BEGIN(0, 0); \
6619 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6622 IEM_MC_PREPARE_SSE_USAGE(); \
6623 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6624 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6625 } IEM_MC_NATIVE_ELSE() { \
6626 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6627 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6628 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6629 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6630 } IEM_MC_NATIVE_ENDIF(); \
6631 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6632 IEM_MC_END(); \
6633 } \
6634 else \
6635 { \
6636 /* \
6637 * Register, memory. \
6638 */ \
6639 AssertFailedReturn(VINF_SUCCESS); \
6640 } (void)0
6641
6642
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib
 * MMX logical right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6649
6650
/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib
 * SSE2 logical right shift of words by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6657
6658
/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib
 * MMX arithmetic right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6665
6666
/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib
 * SSE2 arithmetic right shift of words by immediate (no native emitter yet,
 * arch mask 0 forces the C worker). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
}
6673
6674
/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib
 * MMX left shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6681
6682
/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib
 * SSE2 left shift of words by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6689
6690
6691/**
6692 * Group 12 jump table for register variant.
6693 */
6694IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6695{
6696 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6697 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6698 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6699 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6700 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6701 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6702 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6703 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6704};
6705AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6706
6707
/** Opcode 0x0f 0x71 - Group 12 (word shifts by immediate).
 * Dispatches register-mode encodings via g_apfnGroup12RegReg using the
 * reg field and the active SIMD prefix; memory encodings are invalid. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6718
6719
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib
 * MMX logical right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6726
6727
/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib
 * SSE2 logical right shift of dwords by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6734
6735
/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib
 * MMX arithmetic right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6742
6743
/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib
 * SSE2 arithmetic right shift of dwords by immediate (no native emitter,
 * arch mask 0 forces the C worker). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
}
6750
6751
/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib
 * MMX left shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6758
/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib
 * SSE2 left shift of dwords by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6765
6766
6767/**
6768 * Group 13 jump table for register variant.
6769 */
6770IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6771{
6772 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6773 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6774 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6775 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6776 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6777 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6778 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6779 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6780};
6781AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6782
/** Opcode 0x0f 0x72 - Group 13 (dword shifts by immediate).
 * Dispatches register-mode encodings via g_apfnGroup13RegReg using the
 * reg field and the active SIMD prefix; memory encodings are invalid. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6793
6794
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib
 * MMX logical right shift of the qword by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6801
6802
/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib
 * SSE2 logical right shift of qwords by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6809
6810
/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib
 * SSE2 byte-wise right shift of the whole 128-bit register (no MMX form;
 * no native emitter, arch mask 0 forces the C worker). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
}
6817
6818
/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib
 * MMX left shift of the qword by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6825
6826
/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib
 * SSE2 left shift of qwords by immediate (native emitters on AMD64/ARM64). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6833
6834
/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib
 * SSE2 byte-wise left shift of the whole 128-bit register (no MMX form;
 * no native emitter, arch mask 0 forces the C worker). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
}
6841
6842/**
6843 * Group 14 jump table for register variant.
6844 */
6845IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6846{
6847 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6848 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6849 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6850 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6851 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6852 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6853 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6854 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6855};
6856AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6857
6858
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register - dispatch on /reg and the operand prefix (idxPrefix). */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 14 are all invalid encodings (an imm8 still follows). */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6869
6870
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX byte-wise equality compare; common full,full->full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
6877
6878
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 byte-wise equality compare; native emitters on both AMD64 and ARM64 hosts. */
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6885
6886
6887/* Opcode 0xf3 0x0f 0x74 - invalid */
6888/* Opcode 0xf2 0x0f 0x74 - invalid */
6889
6890
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX word-wise equality compare; common full,full->full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
6897
6898
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 word-wise equality compare; native emitters on both AMD64 and ARM64 hosts. */
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6905
6906
6907/* Opcode 0xf3 0x0f 0x75 - invalid */
6908/* Opcode 0xf2 0x0f 0x75 - invalid */
6909
6910
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX dword-wise equality compare; common full,full->full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
6917
6918
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 dword-wise equality compare; native emitters on both AMD64 and ARM64 hosts. */
    SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
6925
6926
6927/* Opcode 0xf3 0x0f 0x76 - invalid */
6928/* Opcode 0xf2 0x0f 0x76 - invalid */
6929
6930
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* #NM on CR0.TS/EM. */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();              /* #MF on pending x87 exception. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leave MMX mode: per the SDM, EMMS empties the x87 tag word. */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6944
6945/* Opcode 0x66 0x0f 0x77 - invalid */
6946/* Opcode 0xf3 0x0f 0x77 - invalid */
6947/* Opcode 0xf2 0x0f 0x77 - invalid */
6948
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* VMREAD ignores the operand-size prefix: 64-bit in long mode, else 32-bit. */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Destination is rm, the VMCS field encoding comes from reg.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *,          pu64Dst, 0);
            IEM_MC_ARG(uint64_t,            u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            /* The destination GPR is listed as modified for the native recompiler. */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *,          pu64Dst, 0);
            IEM_MC_ARG(uint32_t,            u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         * The VMCS field value is stored to memory; effective address first, then decode done.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,            GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t,      iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t,           u64Enc,   2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR,            GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t,      iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint32_t,           u32Enc,   2);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);    /* #UD when nested VMX is not compiled in. */
#endif
7027
7028/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7029FNIEMOP_STUB(iemOp_AmdGrp17);
7030/* Opcode 0xf3 0x0f 0x78 - invalid */
7031/* Opcode 0xf2 0x0f 0x78 - invalid */
7032
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* VMWRITE ignores the operand-size prefix: 64-bit in long mode, else 32-bit. */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * The value comes from rm, the VMCS field encoding from reg.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            /* Same C worker as the 64-bit form; the u32 args are zero-extended by the MC framework. */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         * The value is read from memory by the C worker; effective address first, then decode done.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,           GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t,     iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t,          u64Enc,  2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR,           GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t,          u32Enc,  2);
            IEM_MC_ARG_CONST(uint8_t,     iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);   /* #UD when nested VMX is not compiled in. */
#endif
7107/* Opcode 0x66 0x0f 0x79 - invalid */
7108/* Opcode 0xf3 0x0f 0x79 - invalid */
7109/* Opcode 0xf2 0x0f 0x79 - invalid */
7110
7111/* Opcode 0x0f 0x7a - invalid */
7112/* Opcode 0x66 0x0f 0x7a - invalid */
7113/* Opcode 0xf3 0x0f 0x7a - invalid */
7114/* Opcode 0xf2 0x0f 0x7a - invalid */
7115
7116/* Opcode 0x0f 0x7b - invalid */
7117/* Opcode 0x66 0x0f 0x7b - invalid */
7118/* Opcode 0xf3 0x0f 0x7b - invalid */
7119/* Opcode 0xf2 0x0f 0x7b - invalid */
7120
7121/* Opcode 0x0f 0x7c - invalid */
7122
7123
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal add of packed doubles; common SSE3 FP full,full->full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
7130
7131
7132/* Opcode 0xf3 0x0f 0x7c - invalid */
7133
7134
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal add of packed singles; common SSE3 FP full,full->full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
7141
7142
7143/* Opcode 0x0f 0x7d - invalid */
7144
7145
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal subtract of packed doubles; common SSE3 FP full,full->full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
7152
7153
7154/* Opcode 0xf3 0x0f 0x7d - invalid */
7155
7156
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal subtract of packed singles; common SSE3 FP full,full->full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7163
7164
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 *
 * MOVD/MOVQ from an MMX register to a GPR or memory.  REX.W selects the
 * 64-bit MOVQ form; otherwise the low dword is moved (MOVD).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();   /* Accessing an MMX register enters MMX mode. */

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            /* Enter MMX mode only after the store - a memory fault must not change FPU state. */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();   /* Accessing an MMX register enters MMX mode. */

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            /* Enter MMX mode only after the store - a memory fault must not change FPU state. */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7272
7273
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy
 *
 * MOVD/MOVQ from an XMM register to a GPR or memory.  REX.W selects the
 * 64-bit MOVQ form; otherwise the low dword is moved (MOVD).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();  /* Only the XMM source is read. */

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7376
7377/**
7378 * @opcode 0x7e
7379 * @oppfx 0xf3
7380 * @opcpuid sse2
7381 * @opgroup og_sse2_pcksclr_datamove
7382 * @opxcpttype none
7383 * @optest op1=1 op2=2 -> op1=2
7384 * @optest op1=0 op2=-42 -> op1=-42
7385 */
7386FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7387{
7388 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7390 if (IEM_IS_MODRM_REG_MODE(bRm))
7391 {
7392 /*
7393 * XMM128, XMM64.
7394 */
7395 IEM_MC_BEGIN(0, 0);
7396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7397 IEM_MC_LOCAL(uint64_t, uSrc);
7398
7399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7401
7402 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7403 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7404
7405 IEM_MC_ADVANCE_RIP_AND_FINISH();
7406 IEM_MC_END();
7407 }
7408 else
7409 {
7410 /*
7411 * XMM128, [mem64].
7412 */
7413 IEM_MC_BEGIN(0, 0);
7414 IEM_MC_LOCAL(uint64_t, uSrc);
7415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7416
7417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7420 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7421
7422 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7423 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7424
7425 IEM_MC_ADVANCE_RIP_AND_FINISH();
7426 IEM_MC_END();
7427 }
7428}
7429
7430/* Opcode 0xf2 0x0f 0x7e - invalid */
7431
7432
/** Opcode 0x0f 0x7f - movq Qq, Pq
 *
 * MOVQ from an MMX register to an MMX register or memory.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Accessing an MMX register enters MMX mode. */

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        /* Enter MMX mode only after the store - a memory fault must not change FPU state. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7480
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 *
 * Aligned 128-bit store from XMM; memory form enforces SSE alignment checks.
 */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();    /* Destination XMM (rm) is written. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();      /* Only the XMM source is read here. */

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Aligned store: raises #GP on a misaligned effective address. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7521
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 *
 * Unaligned 128-bit store from XMM; no SSE alignment check on the memory form.
 */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();    /* Destination XMM (rm) is written. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();      /* Only the XMM source is read here. */

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* No-alignment-check store (#AC still possible, hence _NO_AC variant). */
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7562
7563/* Opcode 0xf2 0x0f 0x7f - invalid */
7564
7565
7566/**
7567 * @opcode 0x80
7568 * @opfltest of
7569 */
7570FNIEMOP_DEF(iemOp_jo_Jv)
7571{
7572 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7573 IEMOP_HLP_MIN_386();
7574 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7575 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7576 {
7577 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7578 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7581 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7582 } IEM_MC_ELSE() {
7583 IEM_MC_ADVANCE_RIP_AND_FINISH();
7584 } IEM_MC_ENDIF();
7585 IEM_MC_END();
7586 }
7587 else
7588 {
7589 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7590 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7592 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7593 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7594 } IEM_MC_ELSE() {
7595 IEM_MC_ADVANCE_RIP_AND_FINISH();
7596 } IEM_MC_ENDIF();
7597 IEM_MC_END();
7598 }
7599}
7600
7601
7602/**
7603 * @opcode 0x81
7604 * @opfltest of
7605 */
7606FNIEMOP_DEF(iemOp_jno_Jv)
7607{
7608 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7609 IEMOP_HLP_MIN_386();
7610 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7611 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7612 {
7613 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7614 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7617 IEM_MC_ADVANCE_RIP_AND_FINISH();
7618 } IEM_MC_ELSE() {
7619 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7620 } IEM_MC_ENDIF();
7621 IEM_MC_END();
7622 }
7623 else
7624 {
7625 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7626 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7628 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7629 IEM_MC_ADVANCE_RIP_AND_FINISH();
7630 } IEM_MC_ELSE() {
7631 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7632 } IEM_MC_ENDIF();
7633 IEM_MC_END();
7634 }
7635}
7636
7637
7638/**
7639 * @opcode 0x82
7640 * @opfltest cf
7641 */
7642FNIEMOP_DEF(iemOp_jc_Jv)
7643{
7644 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7645 IEMOP_HLP_MIN_386();
7646 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7647 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7648 {
7649 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7650 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7653 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7654 } IEM_MC_ELSE() {
7655 IEM_MC_ADVANCE_RIP_AND_FINISH();
7656 } IEM_MC_ENDIF();
7657 IEM_MC_END();
7658 }
7659 else
7660 {
7661 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7662 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7664 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7665 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7666 } IEM_MC_ELSE() {
7667 IEM_MC_ADVANCE_RIP_AND_FINISH();
7668 } IEM_MC_ENDIF();
7669 IEM_MC_END();
7670 }
7671}
7672
7673
7674/**
7675 * @opcode 0x83
7676 * @opfltest cf
7677 */
7678FNIEMOP_DEF(iemOp_jnc_Jv)
7679{
7680 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7681 IEMOP_HLP_MIN_386();
7682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7683 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7684 {
7685 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7686 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7689 IEM_MC_ADVANCE_RIP_AND_FINISH();
7690 } IEM_MC_ELSE() {
7691 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7692 } IEM_MC_ENDIF();
7693 IEM_MC_END();
7694 }
7695 else
7696 {
7697 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7698 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7701 IEM_MC_ADVANCE_RIP_AND_FINISH();
7702 } IEM_MC_ELSE() {
7703 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7704 } IEM_MC_ENDIF();
7705 IEM_MC_END();
7706 }
7707}
7708
7709
7710/**
7711 * @opcode 0x84
7712 * @opfltest zf
7713 */
7714FNIEMOP_DEF(iemOp_je_Jv)
7715{
7716 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7717 IEMOP_HLP_MIN_386();
7718 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7719 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7720 {
7721 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7722 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7725 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7726 } IEM_MC_ELSE() {
7727 IEM_MC_ADVANCE_RIP_AND_FINISH();
7728 } IEM_MC_ENDIF();
7729 IEM_MC_END();
7730 }
7731 else
7732 {
7733 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7734 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7737 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7738 } IEM_MC_ELSE() {
7739 IEM_MC_ADVANCE_RIP_AND_FINISH();
7740 } IEM_MC_ENDIF();
7741 IEM_MC_END();
7742 }
7743}
7744
7745
7746/**
7747 * @opcode 0x85
7748 * @opfltest zf
7749 */
7750FNIEMOP_DEF(iemOp_jne_Jv)
7751{
7752 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7753 IEMOP_HLP_MIN_386();
7754 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7755 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7756 {
7757 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7758 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7761 IEM_MC_ADVANCE_RIP_AND_FINISH();
7762 } IEM_MC_ELSE() {
7763 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7764 } IEM_MC_ENDIF();
7765 IEM_MC_END();
7766 }
7767 else
7768 {
7769 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7770 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7773 IEM_MC_ADVANCE_RIP_AND_FINISH();
7774 } IEM_MC_ELSE() {
7775 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7776 } IEM_MC_ENDIF();
7777 IEM_MC_END();
7778 }
7779}
7780
7781
7782/**
7783 * @opcode 0x86
7784 * @opfltest cf,zf
7785 */
7786FNIEMOP_DEF(iemOp_jbe_Jv)
7787{
7788 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7789 IEMOP_HLP_MIN_386();
7790 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7791 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7792 {
7793 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7794 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7796 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7797 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7798 } IEM_MC_ELSE() {
7799 IEM_MC_ADVANCE_RIP_AND_FINISH();
7800 } IEM_MC_ENDIF();
7801 IEM_MC_END();
7802 }
7803 else
7804 {
7805 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7806 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7808 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7809 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7810 } IEM_MC_ELSE() {
7811 IEM_MC_ADVANCE_RIP_AND_FINISH();
7812 } IEM_MC_ENDIF();
7813 IEM_MC_END();
7814 }
7815}
7816
7817
7818/**
7819 * @opcode 0x87
7820 * @opfltest cf,zf
7821 */
7822FNIEMOP_DEF(iemOp_jnbe_Jv)
7823{
7824 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7825 IEMOP_HLP_MIN_386();
7826 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7827 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7828 {
7829 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7830 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7833 IEM_MC_ADVANCE_RIP_AND_FINISH();
7834 } IEM_MC_ELSE() {
7835 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7836 } IEM_MC_ENDIF();
7837 IEM_MC_END();
7838 }
7839 else
7840 {
7841 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7842 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7844 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7845 IEM_MC_ADVANCE_RIP_AND_FINISH();
7846 } IEM_MC_ELSE() {
7847 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7848 } IEM_MC_ENDIF();
7849 IEM_MC_END();
7850 }
7851}
7852
7853
7854/**
7855 * @opcode 0x88
7856 * @opfltest sf
7857 */
7858FNIEMOP_DEF(iemOp_js_Jv)
7859{
7860 IEMOP_MNEMONIC(js_Jv, "js Jv");
7861 IEMOP_HLP_MIN_386();
7862 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7863 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7864 {
7865 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7866 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7869 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7870 } IEM_MC_ELSE() {
7871 IEM_MC_ADVANCE_RIP_AND_FINISH();
7872 } IEM_MC_ENDIF();
7873 IEM_MC_END();
7874 }
7875 else
7876 {
7877 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7878 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7881 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7882 } IEM_MC_ELSE() {
7883 IEM_MC_ADVANCE_RIP_AND_FINISH();
7884 } IEM_MC_ENDIF();
7885 IEM_MC_END();
7886 }
7887}
7888
7889
7890/**
7891 * @opcode 0x89
7892 * @opfltest sf
 *
 * jns Jv: jump near (signed rel16/rel32 displacement) if not sign, i.e. when
 * SF=0.  Note the inverted test below: SF set means fall through, clear means
 * take the jump.
7893 */
7894FNIEMOP_DEF(iemOp_jns_Jv)
7895{
7896    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7897    IEMOP_HLP_MIN_386();
7898    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
7899    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7900    {
7901        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7902        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7903        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7904        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7905            IEM_MC_ADVANCE_RIP_AND_FINISH();
7906        } IEM_MC_ELSE() {
7907            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7908        } IEM_MC_ENDIF();
7909        IEM_MC_END();
7910    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
7911    else
7912    {
7913        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7914        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7915        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7916        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7917            IEM_MC_ADVANCE_RIP_AND_FINISH();
7918        } IEM_MC_ELSE() {
7919            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7920        } IEM_MC_ENDIF();
7921        IEM_MC_END();
7922    }
7923}
7924
7925
7926/**
7927 * @opcode 0x8a
7928 * @opfltest pf
 *
 * jp Jv: jump near (signed rel16/rel32 displacement) if parity, i.e. when
 * PF=1; otherwise fall through.
7929 */
7930FNIEMOP_DEF(iemOp_jp_Jv)
7931{
7932    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7933    IEMOP_HLP_MIN_386();
7934    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
7935    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7936    {
7937        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7938        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7939        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7940        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7941            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7942        } IEM_MC_ELSE() {
7943            IEM_MC_ADVANCE_RIP_AND_FINISH();
7944        } IEM_MC_ENDIF();
7945        IEM_MC_END();
7946    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
7947    else
7948    {
7949        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7950        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7951        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7952        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7953            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7954        } IEM_MC_ELSE() {
7955            IEM_MC_ADVANCE_RIP_AND_FINISH();
7956        } IEM_MC_ENDIF();
7957        IEM_MC_END();
7958    }
7959}
7960
7961
7962/**
7963 * @opcode 0x8b
7964 * @opfltest pf
 *
 * jnp Jv: jump near (signed rel16/rel32 displacement) if not parity, i.e.
 * when PF=0.  Note the inverted test below: PF set means fall through.
7965 */
7966FNIEMOP_DEF(iemOp_jnp_Jv)
7967{
7968    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7969    IEMOP_HLP_MIN_386();
7970    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
7971    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7972    {
7973        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7974        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7975        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7977            IEM_MC_ADVANCE_RIP_AND_FINISH();
7978        } IEM_MC_ELSE() {
7979            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7980        } IEM_MC_ENDIF();
7981        IEM_MC_END();
7982    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
7983    else
7984    {
7985        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7986        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7987        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7988        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7989            IEM_MC_ADVANCE_RIP_AND_FINISH();
7990        } IEM_MC_ELSE() {
7991            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7992        } IEM_MC_ENDIF();
7993        IEM_MC_END();
7994    }
7995}
7996
7997
7998/**
7999 * @opcode 0x8c
8000 * @opfltest sf,of
 *
 * jl/jnge Jv: jump near (signed rel16/rel32 displacement) if less (signed),
 * i.e. when SF != OF; otherwise fall through.
8001 */
8002FNIEMOP_DEF(iemOp_jl_Jv)
8003{
8004    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8005    IEMOP_HLP_MIN_386();
8006    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
8007    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8008    {
8009        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8010        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8011        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8013            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8014        } IEM_MC_ELSE() {
8015            IEM_MC_ADVANCE_RIP_AND_FINISH();
8016        } IEM_MC_ENDIF();
8017        IEM_MC_END();
8018    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
8019    else
8020    {
8021        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8022        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8023        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8025            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8026        } IEM_MC_ELSE() {
8027            IEM_MC_ADVANCE_RIP_AND_FINISH();
8028        } IEM_MC_ENDIF();
8029        IEM_MC_END();
8030    }
8031}
8032
8033
8034/**
8035 * @opcode 0x8d
8036 * @opfltest sf,of
 *
 * jnl/jge Jv: jump near (signed rel16/rel32 displacement) if greater or equal
 * (signed), i.e. when SF == OF.  Note the inverted test below: SF != OF means
 * fall through.
8037 */
8038FNIEMOP_DEF(iemOp_jnl_Jv)
8039{
8040    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8041    IEMOP_HLP_MIN_386();
8042    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
8043    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8044    {
8045        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8046        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8047        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8049            IEM_MC_ADVANCE_RIP_AND_FINISH();
8050        } IEM_MC_ELSE() {
8051            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8052        } IEM_MC_ENDIF();
8053        IEM_MC_END();
8054    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
8055    else
8056    {
8057        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8058        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8059        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8061            IEM_MC_ADVANCE_RIP_AND_FINISH();
8062        } IEM_MC_ELSE() {
8063            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8064        } IEM_MC_ENDIF();
8065        IEM_MC_END();
8066    }
8067}
8068
8069
8070/**
8071 * @opcode 0x8e
8072 * @opfltest zf,sf,of
 *
 * jle/jng Jv: jump near (signed rel16/rel32 displacement) if less or equal
 * (signed), i.e. when ZF=1 or SF != OF; otherwise fall through.
8073 */
8074FNIEMOP_DEF(iemOp_jle_Jv)
8075{
8076    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8077    IEMOP_HLP_MIN_386();
8078    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
8079    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8080    {
8081        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8082        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8083        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8085            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8086        } IEM_MC_ELSE() {
8087            IEM_MC_ADVANCE_RIP_AND_FINISH();
8088        } IEM_MC_ENDIF();
8089        IEM_MC_END();
8090    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
8091    else
8092    {
8093        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8094        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8095        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8097            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8098        } IEM_MC_ELSE() {
8099            IEM_MC_ADVANCE_RIP_AND_FINISH();
8100        } IEM_MC_ENDIF();
8101        IEM_MC_END();
8102    }
8103}
8104
8105
8106/**
8107 * @opcode 0x8f
8108 * @opfltest zf,sf,of
 *
 * jnle/jg Jv: jump near (signed rel16/rel32 displacement) if greater
 * (signed), i.e. when ZF=0 and SF == OF.  Note the inverted test below:
 * ZF set or SF != OF means fall through.
8109 */
8110FNIEMOP_DEF(iemOp_jnle_Jv)
8111{
8112    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8113    IEMOP_HLP_MIN_386();
8114    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* 16-bit operand size: 16-bit signed displacement. */
8115    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8116    {
8117        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8118        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8119        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8121            IEM_MC_ADVANCE_RIP_AND_FINISH();
8122        } IEM_MC_ELSE() {
8123            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8124        } IEM_MC_ENDIF();
8125        IEM_MC_END();
8126    }
    /* 32-bit and 64-bit operand size: 32-bit signed displacement. */
8127    else
8128    {
8129        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8130        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8131        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8132        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8133            IEM_MC_ADVANCE_RIP_AND_FINISH();
8134        } IEM_MC_ELSE() {
8135            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8136        } IEM_MC_ENDIF();
8137        IEM_MC_END();
8138    }
8139}
8140
8141
8142/**
8143 * @opcode 0x90
8144 * @opfltest of
 *
 * seto Eb: store 1 in the byte-sized register/memory operand if OF is set,
 * otherwise store 0.
8145 */
8146FNIEMOP_DEF(iemOp_seto_Eb)
8147{
8148    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8149    IEMOP_HLP_MIN_386();
8150    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8151
8152    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8153     *        any way.  AMD says it's "unused", whatever that means.  We're
8154     *        ignoring for now. */
8155    if (IEM_IS_MODRM_REG_MODE(bRm))
8156    {
8157        /* register target */
8158        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8159        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8160        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8161            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8162        } IEM_MC_ELSE() {
8163            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8164        } IEM_MC_ENDIF();
8165        IEM_MC_ADVANCE_RIP_AND_FINISH();
8166        IEM_MC_END();
8167    }
8168    else
8169    {
8170        /* memory target */
8171        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8172        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8173        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8174        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8175        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8176            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8177        } IEM_MC_ELSE() {
8178            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8179        } IEM_MC_ENDIF();
8180        IEM_MC_ADVANCE_RIP_AND_FINISH();
8181        IEM_MC_END();
8182    }
8183}
8184
8185
8186/**
8187 * @opcode 0x91
8188 * @opfltest of
 *
 * setno Eb: store 1 in the byte-sized register/memory operand if OF is clear,
 * otherwise store 0 (the IF below tests OF and stores the inverted value).
8189 */
8190FNIEMOP_DEF(iemOp_setno_Eb)
8191{
8192    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8193    IEMOP_HLP_MIN_386();
8194    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8195
8196    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8197     *        any way.  AMD says it's "unused", whatever that means.  We're
8198     *        ignoring for now. */
8199    if (IEM_IS_MODRM_REG_MODE(bRm))
8200    {
8201        /* register target */
8202        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8203        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8204        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8205            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8206        } IEM_MC_ELSE() {
8207            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8208        } IEM_MC_ENDIF();
8209        IEM_MC_ADVANCE_RIP_AND_FINISH();
8210        IEM_MC_END();
8211    }
8212    else
8213    {
8214        /* memory target */
8215        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8216        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8217        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8218        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8219        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8220            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8221        } IEM_MC_ELSE() {
8222            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8223        } IEM_MC_ENDIF();
8224        IEM_MC_ADVANCE_RIP_AND_FINISH();
8225        IEM_MC_END();
8226    }
8227}
8228
8229
8230/**
8231 * @opcode 0x92
8232 * @opfltest cf
 *
 * setc/setb/setnae Eb: store 1 in the byte-sized register/memory operand if
 * CF is set, otherwise store 0.
8233 */
8234FNIEMOP_DEF(iemOp_setc_Eb)
8235{
8236    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8237    IEMOP_HLP_MIN_386();
8238    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8239
8240    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8241     *        any way.  AMD says it's "unused", whatever that means.  We're
8242     *        ignoring for now. */
8243    if (IEM_IS_MODRM_REG_MODE(bRm))
8244    {
8245        /* register target */
8246        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8247        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8249            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8250        } IEM_MC_ELSE() {
8251            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8252        } IEM_MC_ENDIF();
8253        IEM_MC_ADVANCE_RIP_AND_FINISH();
8254        IEM_MC_END();
8255    }
8256    else
8257    {
8258        /* memory target */
8259        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8260        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8261        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8262        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8263        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8264            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8265        } IEM_MC_ELSE() {
8266            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8267        } IEM_MC_ENDIF();
8268        IEM_MC_ADVANCE_RIP_AND_FINISH();
8269        IEM_MC_END();
8270    }
8271}
8272
8273
8274/**
8275 * @opcode 0x93
8276 * @opfltest cf
 *
 * setnc/setnb/setae Eb: store 1 in the byte-sized register/memory operand if
 * CF is clear, otherwise store 0 (inverted store values below).
8277 */
8278FNIEMOP_DEF(iemOp_setnc_Eb)
8279{
8280    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8281    IEMOP_HLP_MIN_386();
8282    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8283
8284    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8285     *        any way.  AMD says it's "unused", whatever that means.  We're
8286     *        ignoring for now. */
8287    if (IEM_IS_MODRM_REG_MODE(bRm))
8288    {
8289        /* register target */
8290        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8291        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8292        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8293            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8294        } IEM_MC_ELSE() {
8295            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8296        } IEM_MC_ENDIF();
8297        IEM_MC_ADVANCE_RIP_AND_FINISH();
8298        IEM_MC_END();
8299    }
8300    else
8301    {
8302        /* memory target */
8303        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8304        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8305        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8306        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8307        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8308            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8309        } IEM_MC_ELSE() {
8310            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8311        } IEM_MC_ENDIF();
8312        IEM_MC_ADVANCE_RIP_AND_FINISH();
8313        IEM_MC_END();
8314    }
8315}
8316
8317
8318/**
8319 * @opcode 0x94
8320 * @opfltest zf
 *
 * sete/setz Eb: store 1 in the byte-sized register/memory operand if ZF is
 * set, otherwise store 0.
8321 */
8322FNIEMOP_DEF(iemOp_sete_Eb)
8323{
8324    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8325    IEMOP_HLP_MIN_386();
8326    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8327
8328    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8329     *        any way.  AMD says it's "unused", whatever that means.  We're
8330     *        ignoring for now. */
8331    if (IEM_IS_MODRM_REG_MODE(bRm))
8332    {
8333        /* register target */
8334        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8335        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8336        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8337            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8338        } IEM_MC_ELSE() {
8339            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8340        } IEM_MC_ENDIF();
8341        IEM_MC_ADVANCE_RIP_AND_FINISH();
8342        IEM_MC_END();
8343    }
8344    else
8345    {
8346        /* memory target */
8347        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8348        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8349        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8350        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8351        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8352            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8353        } IEM_MC_ELSE() {
8354            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8355        } IEM_MC_ENDIF();
8356        IEM_MC_ADVANCE_RIP_AND_FINISH();
8357        IEM_MC_END();
8358    }
8359}
8360
8361
8362/**
8363 * @opcode 0x95
8364 * @opfltest zf
 *
 * setne/setnz Eb: store 1 in the byte-sized register/memory operand if ZF is
 * clear, otherwise store 0 (inverted store values below).
8365 */
8366FNIEMOP_DEF(iemOp_setne_Eb)
8367{
8368    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8369    IEMOP_HLP_MIN_386();
8370    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8371
8372    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8373     *        any way.  AMD says it's "unused", whatever that means.  We're
8374     *        ignoring for now. */
8375    if (IEM_IS_MODRM_REG_MODE(bRm))
8376    {
8377        /* register target */
8378        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8379        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8380        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8381            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8382        } IEM_MC_ELSE() {
8383            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8384        } IEM_MC_ENDIF();
8385        IEM_MC_ADVANCE_RIP_AND_FINISH();
8386        IEM_MC_END();
8387    }
8388    else
8389    {
8390        /* memory target */
8391        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8392        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8393        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8394        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8395        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8396            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8397        } IEM_MC_ELSE() {
8398            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8399        } IEM_MC_ENDIF();
8400        IEM_MC_ADVANCE_RIP_AND_FINISH();
8401        IEM_MC_END();
8402    }
8403}
8404
8405
8406/**
8407 * @opcode 0x96
8408 * @opfltest cf,zf
 *
 * setbe/setna Eb: store 1 in the byte-sized register/memory operand if CF or
 * ZF is set (below or equal, unsigned), otherwise store 0.
8409 */
8410FNIEMOP_DEF(iemOp_setbe_Eb)
8411{
8412    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8413    IEMOP_HLP_MIN_386();
8414    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8415
8416    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8417     *        any way.  AMD says it's "unused", whatever that means.  We're
8418     *        ignoring for now. */
8419    if (IEM_IS_MODRM_REG_MODE(bRm))
8420    {
8421        /* register target */
8422        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8423        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8424        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8425            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8426        } IEM_MC_ELSE() {
8427            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8428        } IEM_MC_ENDIF();
8429        IEM_MC_ADVANCE_RIP_AND_FINISH();
8430        IEM_MC_END();
8431    }
8432    else
8433    {
8434        /* memory target */
8435        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8436        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8437        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8438        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8439        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8440            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8441        } IEM_MC_ELSE() {
8442            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8443        } IEM_MC_ENDIF();
8444        IEM_MC_ADVANCE_RIP_AND_FINISH();
8445        IEM_MC_END();
8446    }
8447}
8448
8449
8450/**
8451 * @opcode 0x97
8452 * @opfltest cf,zf
 *
 * setnbe/seta Eb: store 1 in the byte-sized register/memory operand if both
 * CF and ZF are clear (above, unsigned), otherwise store 0 (inverted store
 * values below).
8453 */
8454FNIEMOP_DEF(iemOp_setnbe_Eb)
8455{
8456    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8457    IEMOP_HLP_MIN_386();
8458    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8459
8460    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8461     *        any way.  AMD says it's "unused", whatever that means.  We're
8462     *        ignoring for now. */
8463    if (IEM_IS_MODRM_REG_MODE(bRm))
8464    {
8465        /* register target */
8466        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8467        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8468        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8469            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8470        } IEM_MC_ELSE() {
8471            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8472        } IEM_MC_ENDIF();
8473        IEM_MC_ADVANCE_RIP_AND_FINISH();
8474        IEM_MC_END();
8475    }
8476    else
8477    {
8478        /* memory target */
8479        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8480        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8481        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8482        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8484            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8485        } IEM_MC_ELSE() {
8486            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8487        } IEM_MC_ENDIF();
8488        IEM_MC_ADVANCE_RIP_AND_FINISH();
8489        IEM_MC_END();
8490    }
8491}
8492
8493
8494/**
8495 * @opcode 0x98
8496 * @opfltest sf
 *
 * sets Eb: store 1 in the byte-sized register/memory operand if SF is set,
 * otherwise store 0.
8497 */
8498FNIEMOP_DEF(iemOp_sets_Eb)
8499{
8500    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8501    IEMOP_HLP_MIN_386();
8502    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8503
8504    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8505     *        any way.  AMD says it's "unused", whatever that means.  We're
8506     *        ignoring for now. */
8507    if (IEM_IS_MODRM_REG_MODE(bRm))
8508    {
8509        /* register target */
8510        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8511        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8512        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8513            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8514        } IEM_MC_ELSE() {
8515            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8516        } IEM_MC_ENDIF();
8517        IEM_MC_ADVANCE_RIP_AND_FINISH();
8518        IEM_MC_END();
8519    }
8520    else
8521    {
8522        /* memory target */
8523        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8524        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8525        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8526        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8528            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8529        } IEM_MC_ELSE() {
8530            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8531        } IEM_MC_ENDIF();
8532        IEM_MC_ADVANCE_RIP_AND_FINISH();
8533        IEM_MC_END();
8534    }
8535}
8536
8537
8538/**
8539 * @opcode 0x99
8540 * @opfltest sf
 *
 * setns Eb: store 1 in the byte-sized register/memory operand if SF is clear,
 * otherwise store 0 (inverted store values below).
8541 */
8542FNIEMOP_DEF(iemOp_setns_Eb)
8543{
8544    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8545    IEMOP_HLP_MIN_386();
8546    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8547
8548    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8549     *        any way.  AMD says it's "unused", whatever that means.  We're
8550     *        ignoring for now. */
8551    if (IEM_IS_MODRM_REG_MODE(bRm))
8552    {
8553        /* register target */
8554        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8555        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8556        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8557            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8558        } IEM_MC_ELSE() {
8559            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8560        } IEM_MC_ENDIF();
8561        IEM_MC_ADVANCE_RIP_AND_FINISH();
8562        IEM_MC_END();
8563    }
8564    else
8565    {
8566        /* memory target */
8567        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8568        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8569        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8570        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8571        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8572            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8573        } IEM_MC_ELSE() {
8574            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8575        } IEM_MC_ENDIF();
8576        IEM_MC_ADVANCE_RIP_AND_FINISH();
8577        IEM_MC_END();
8578    }
8579}
8580
8581
8582/**
8583 * @opcode 0x9a
8584 * @opfltest pf
 *
 * setp/setpe Eb: store 1 in the byte-sized register/memory operand if PF is
 * set, otherwise store 0.
8585 */
8586FNIEMOP_DEF(iemOp_setp_Eb)
8587{
8588    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8589    IEMOP_HLP_MIN_386();
8590    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8591
8592    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8593     *        any way.  AMD says it's "unused", whatever that means.  We're
8594     *        ignoring for now. */
8595    if (IEM_IS_MODRM_REG_MODE(bRm))
8596    {
8597        /* register target */
8598        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8599        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8600        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8601            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8602        } IEM_MC_ELSE() {
8603            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8604        } IEM_MC_ENDIF();
8605        IEM_MC_ADVANCE_RIP_AND_FINISH();
8606        IEM_MC_END();
8607    }
8608    else
8609    {
8610        /* memory target */
8611        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8612        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8613        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8614        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8615        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8616            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8617        } IEM_MC_ELSE() {
8618            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8619        } IEM_MC_ENDIF();
8620        IEM_MC_ADVANCE_RIP_AND_FINISH();
8621        IEM_MC_END();
8622    }
8623}
8624
8625
8626/**
8627 * @opcode 0x9b
8628 * @opfltest pf
 *
 * setnp/setpo Eb: store 1 in the byte-sized register/memory operand if PF is
 * clear, otherwise store 0 (inverted store values below).
8629 */
8630FNIEMOP_DEF(iemOp_setnp_Eb)
8631{
8632    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8633    IEMOP_HLP_MIN_386();
8634    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8635
8636    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8637     *        any way.  AMD says it's "unused", whatever that means.  We're
8638     *        ignoring for now. */
8639    if (IEM_IS_MODRM_REG_MODE(bRm))
8640    {
8641        /* register target */
8642        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8643        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8644        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8645            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8646        } IEM_MC_ELSE() {
8647            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8648        } IEM_MC_ENDIF();
8649        IEM_MC_ADVANCE_RIP_AND_FINISH();
8650        IEM_MC_END();
8651    }
8652    else
8653    {
8654        /* memory target */
8655        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8656        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8657        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8658        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8659        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8660            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8661        } IEM_MC_ELSE() {
8662            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8663        } IEM_MC_ENDIF();
8664        IEM_MC_ADVANCE_RIP_AND_FINISH();
8665        IEM_MC_END();
8666    }
8667}
8668
8669
8670/**
8671 * @opcode 0x9c
8672 * @opfltest sf,of
 *
 * setl/setnge Eb: store 1 in the byte-sized register/memory operand if
 * SF != OF (less, signed), otherwise store 0.
8673 */
8674FNIEMOP_DEF(iemOp_setl_Eb)
8675{
8676    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8677    IEMOP_HLP_MIN_386();
8678    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8679
8680    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8681     *        any way.  AMD says it's "unused", whatever that means.  We're
8682     *        ignoring for now. */
8683    if (IEM_IS_MODRM_REG_MODE(bRm))
8684    {
8685        /* register target */
8686        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8687        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8688        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8689            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8690        } IEM_MC_ELSE() {
8691            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8692        } IEM_MC_ENDIF();
8693        IEM_MC_ADVANCE_RIP_AND_FINISH();
8694        IEM_MC_END();
8695    }
8696    else
8697    {
8698        /* memory target */
8699        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8700        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8701        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8702        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8703        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8704            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8705        } IEM_MC_ELSE() {
8706            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8707        } IEM_MC_ENDIF();
8708        IEM_MC_ADVANCE_RIP_AND_FINISH();
8709        IEM_MC_END();
8710    }
8711}
8712
8713
8714/**
8715 * @opcode 0x9d
8716 * @opfltest sf,of
 *
 * setnl/setge Eb: store 1 in the byte-sized register/memory operand if
 * SF == OF (greater or equal, signed), otherwise store 0 (inverted store
 * values below).
8717 */
8718FNIEMOP_DEF(iemOp_setnl_Eb)
8719{
8720    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8721    IEMOP_HLP_MIN_386();
8722    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8723
8724    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8725     *        any way.  AMD says it's "unused", whatever that means.  We're
8726     *        ignoring for now. */
8727    if (IEM_IS_MODRM_REG_MODE(bRm))
8728    {
8729        /* register target */
8730        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8731        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8732        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8733            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8734        } IEM_MC_ELSE() {
8735            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8736        } IEM_MC_ENDIF();
8737        IEM_MC_ADVANCE_RIP_AND_FINISH();
8738        IEM_MC_END();
8739    }
8740    else
8741    {
8742        /* memory target */
8743        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8744        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8745        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8746        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8747        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8748            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8749        } IEM_MC_ELSE() {
8750            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8751        } IEM_MC_ENDIF();
8752        IEM_MC_ADVANCE_RIP_AND_FINISH();
8753        IEM_MC_END();
8754    }
8755}
8756
8757
8758/**
8759 * @opcode 0x9e
8760 * @opfltest zf,sf,of
 *
 * setle/setng Eb: store 1 in the byte-sized register/memory operand if ZF=1
 * or SF != OF (less or equal, signed), otherwise store 0.
8761 */
8762FNIEMOP_DEF(iemOp_setle_Eb)
8763{
8764    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8765    IEMOP_HLP_MIN_386();
8766    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8767
8768    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8769     *        any way.  AMD says it's "unused", whatever that means.  We're
8770     *        ignoring for now. */
8771    if (IEM_IS_MODRM_REG_MODE(bRm))
8772    {
8773        /* register target */
8774        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8775        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8776        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8777            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8778        } IEM_MC_ELSE() {
8779            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8780        } IEM_MC_ENDIF();
8781        IEM_MC_ADVANCE_RIP_AND_FINISH();
8782        IEM_MC_END();
8783    }
8784    else
8785    {
8786        /* memory target */
8787        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8788        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8789        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8790        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8791        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8792            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8793        } IEM_MC_ELSE() {
8794            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8795        } IEM_MC_ENDIF();
8796        IEM_MC_ADVANCE_RIP_AND_FINISH();
8797        IEM_MC_END();
8798    }
8799}
8800
8801
8802/**
8803 * @opcode 0x9f
8804 * @opfltest zf,sf,of
 *
 * setnle/setg Eb: store 1 in the byte-sized register/memory operand if ZF=0
 * and SF == OF (greater, signed), otherwise store 0 (inverted store values
 * below).
8805 */
8806FNIEMOP_DEF(iemOp_setnle_Eb)
8807{
8808    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8809    IEMOP_HLP_MIN_386();
8810    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8811
8812    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8813     *        any way.  AMD says it's "unused", whatever that means.  We're
8814     *        ignoring for now. */
8815    if (IEM_IS_MODRM_REG_MODE(bRm))
8816    {
8817        /* register target */
8818        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8819        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8820        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8821            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8822        } IEM_MC_ELSE() {
8823            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8824        } IEM_MC_ENDIF();
8825        IEM_MC_ADVANCE_RIP_AND_FINISH();
8826        IEM_MC_END();
8827    }
8828    else
8829    {
8830        /* memory target */
8831        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8832        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8833        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8834        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8835        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8836            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8837        } IEM_MC_ELSE() {
8838            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8839        } IEM_MC_ENDIF();
8840        IEM_MC_ADVANCE_RIP_AND_FINISH();
8841        IEM_MC_END();
8842    }
8843}
8844
8845
8846/** Opcode 0x0f 0xa0 - 'push fs'.  Defers to the common segment-register push
 *  worker (iemOpCommonPushSReg). */
8847FNIEMOP_DEF(iemOp_push_fs)
8848{
8849    IEMOP_MNEMONIC(push_fs, "push fs");
8850    IEMOP_HLP_MIN_386();
8851    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8852    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8853}
8854
8855
8856/** Opcode 0x0f 0xa1 - 'pop fs'.  Defers to the C implementation worker
 *  iemCImpl_pop_Sreg; the register mask lists the guest state it may modify
 *  (rSP plus all FS hidden-register fields) for the native recompiler. */
8857FNIEMOP_DEF(iemOp_pop_fs)
8858{
8859    IEMOP_MNEMONIC(pop_fs, "pop fs");
8860    IEMOP_HLP_MIN_386();
8861    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8862    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8863    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8864                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8865                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8866                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8867                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8868                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8869                                iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8870}
8871
8872
8873/** Opcode 0x0f 0xa2 - 'cpuid'.  Defers to iemCImpl_cpuid; flagged as a
 *  potential VM-exit point and as modifying rAX/rBX/rCX/rDX. */
8874FNIEMOP_DEF(iemOp_cpuid)
8875{
8876    IEMOP_MNEMONIC(cpuid, "cpuid");
8877    IEMOP_HLP_MIN_486(); /* not all 486es. */
8878    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8879    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8880                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8881                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8882                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8883                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8884                                iemCImpl_cpuid);
8885}
8886
8887
/**
 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Read-write variant: handles the register destination case and the
 * non-LOCKed memory destination case.  It deliberately ends inside an open
 * 'else {' scope; the invoking opcode function MUST follow up with
 * IEMOP_BODY_BIT_Ev_Gv_LOCKED, which supplies the LOCK-prefixed memory
 * variant and closes the scopes.  (Two macros because of a parsing issue in
 * IEMAllInstPython.py.)
 *
 * For a register destination the bit offset in Gv is masked to the operand
 * width (0xf/0x1f/0x3f).  For a memory destination the offset is treated as
 * a signed bit index: the effective address is first adjusted by
 * (offset >> log2(width)) * bytes-per-operand (arithmetic shift, so negative
 * offsets move backwards) before the low bits select the bit within the
 * mapped operand.  EFLAGS are passed through the a_fnNormal* assembly helper
 * which returns the updated flags; OF/SF/ZF/AF/PF are undefined per the SDM.
 */
#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint16_t,        u16Src,    2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,   1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(          fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint32_t,        u32Src,    2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,   1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(          fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint64_t,        u64Src,    2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,   1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(          fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t,                u16Src,     2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t,        i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,    1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t,                u32Src,     2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t,        i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,    1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t,                u64Src,     2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t,        i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,    1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
 *
 * Completes IEMOP_BODY_BIT_Ev_Gv_RW: supplies the LOCK-prefixed memory
 * destination variant (atomic mapping/commit) and closes the scopes left
 * open by the RW macro.  Same signed-bit-offset address adjustment as the
 * unlocked path; only the a_fnLocked* helpers and the _ATOMIC map/unmap
 * differ. */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t,                u16Src,     2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t,        i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,    1); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t,                u32Src,     2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t,        i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,    1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t,                u64Src,     2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t,        i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,    1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
9142
/* Read-only version (bt).
 *
 * Self-contained body for the non-writing bit-test instruction: register
 * operand and read-only memory mapping (same signed bit-offset address
 * adjustment as the RW variant).  BT never writes the destination, so a
 * LOCK prefix is invalid and raises #UD here.  OF/SF/ZF/AF/PF are undefined
 * per the SDM; the assembly helper returns the updated EFLAGS (CF). */
#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint16_t,            u16Src,    2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_ARG(uint16_t const *,    pu16Dst,   1); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint32_t,            u32Src,    2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_ARG(uint32_t const *,    pu32Dst,   1); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint64_t,            u64Src,    2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_ARG(uint64_t const *,    pu64Dst,   1); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t,                u16Src,     2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t,        i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *,        pu16Dst,    1); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t,                u32Src,     2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t,        i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_ARG(uint32_t const *,        pu32Dst,    1); \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t,                u64Src,     2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t,        i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *,        pu64Dst,    1); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(                  fEFlagsIn,  0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* BT does not write, so a LOCK prefix is always invalid (#UD). */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
9306
9307
/**
 * @opcode 0xa3
 * @oppfx n/a
 * @opflclass bitmap
 *
 * 'bt Ev,Gv' - bit test; read-only, so it uses the RO body which rejects the
 * LOCK prefix with \#UD.
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
9319
9320
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Immediate-count double-precision shift: decodes ModR/M and the Ib shift
 * count, then dispatches on effective operand size for both register and
 * memory destinations.  AF and OF are undefined per the SDM.  LOCK is not
 * allowed.  a_pImplExpr selects the shld/shrd assembly worker table.
 *
 * NOTE(review): the name is presumably meant to be IEMOP_BODY_SHLD_SHRD_Ib
 * (matching IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL below); renaming would touch all
 * callers, so it is only noted here.
 */
#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                IEM_MC_ARG(uint16_t,        u16Src,             1); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                IEM_MC_ARG(uint32_t,        u32Src,             1); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                IEM_MC_ARG(uint64_t,        u64Src,             1); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                IEM_MC_ARG(uint16_t *,          pu16Dst,    0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint16_t,            u16Src,     1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags,    3); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                IEM_MC_ARG(uint32_t *,          pu32Dst,    0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint32_t,            u32Src,     1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags,    3); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                IEM_MC_ARG(uint64_t *,          pu64Dst,    0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint64_t,            u64Src,     1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags,    3); \
                \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9471
9472
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * CL-count double-precision shift: like IEMOP_BODY_SHLD_SHR_Ib but the shift
 * count is fetched from CL at runtime instead of an immediate byte.  AF and
 * OF are undefined per the SDM.  LOCK is not allowed.  a_pImplExpr selects
 * the shld/shrd assembly worker table.
 */
#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0); \
                IEM_MC_ARG(uint16_t,        u16Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0); \
                IEM_MC_ARG(uint32_t,        u32Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0); \
                IEM_MC_ARG(uint64_t,        u64Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0); \
                IEM_MC_ARG(uint16_t,        u16Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0); \
                IEM_MC_ARG(uint32_t,        u32Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0); \
                IEM_MC_ARG(uint64_t,        u64Src,     1); \
                IEM_MC_ARG(uint8_t,         cShiftArg,  2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9617
9618
/**
 * @opcode 0xa4
 * @opflclass shift_count
 *
 * 'shld Ev,Gv,Ib' - double-precision shift left, immediate count.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9629
9630
/**
 * @opcode 0xa5
 * @opflclass shift_count
 *
 * 'shld Ev,Gv,CL' - double-precision shift left, count in CL.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9641
9642
/** Opcode 0x0f 0xa8.
 *
 * 'push gs' - pushes the GS selector; shares the common segment-register
 * push worker with push fs. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9651
9652
/** Opcode 0x0f 0xa9.
 *
 * 'pop gs' - mirrors iemOp_pop_fs: deferred to iemCImpl_pop_Sreg with RSP
 * and all four GS shadow components (selector, base, limit, attributes) in
 * the modified-register mask. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9668
9669
/** Opcode 0x0f 0xaa.
 *
 * 'rsm' - resume from system management mode.  Deferred to iemCImpl_rsm; the
 * flags mark it as a far indirect branch that switches stack, may change CPU
 * mode and RFLAGS, can VM-exit, and ends the current translation block. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_rsm);
}
9680
9681
9682
/**
 * @opcode 0xab
 * @oppfx n/a
 * @opflclass bitmap
 *
 * 'bts Ev,Gv' - bit test and set.  The RW body handles register and
 * unlocked memory forms and ends in an open scope that the LOCKED body
 * completes with the atomic (LOCK-prefixed) memory form - both macro
 * invocations are required, in this order.
 */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9695
9696
/**
 * @opcode 0xac
 * @opflclass shift_count
 *
 * 'shrd Ev,Gv,Ib' - double-precision shift right, immediate count.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9707
9708
/**
 * @opcode 0xad
 * @opflclass shift_count
 *
 * 'shrd Ev,Gv,CL' - double-precision shift right, count in CL.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9719
9720
/** Opcode 0x0f 0xae mem/0.
 *
 * 'fxsave m512' - saves the x87/MMX/SSE state to memory.  Raises #UD when
 * the guest CPU lacks FXSR.  The FPU state is actualized for reading only,
 * since fxsave does not modify it; the heavy lifting is in iemCImpl_fxsave
 * (operand size selects the 64-bit layout variant). */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9738
9739
/** Opcode 0x0f 0xae mem/1.
 *
 * 'fxrstor m512' - restores the x87/MMX/SSE state from memory.  Raises #UD
 * when the guest CPU lacks FXSR.  Unlike fxsave, the FPU state is actualized
 * for change, and the native recompiler is told FCW/FSW/MXCSR shadows are
 * invalidated by the restore. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
                        iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9758
9759
/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * 'ldmxcsr m32' - loads MXCSR from memory; #UD without SSE.  The CIMPL call
 * declares MXCSR as modified for the recompiler.
 * NOTE(review): the SSE state is actualized FOR_READ although MXCSR is
 * written - presumably iemCImpl_ldmxcsr updates the register itself; confirm
 * against the other MXCSR writers.
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9794
9795
/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD unless the guest CPU profile exposes SSE. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Only reads MXCSR (stores it to memory), so FOR_READ is sufficient. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9829
9830
/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    /* #UD unless the guest CPU profile exposes XSAVE/XRSTOR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XSAVE only reads guest state and writes it to memory. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9855
9856
/**
 * @opmaps grp15
 * @opcode !11/5
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    /* #UD unless the guest CPU profile exposes XSAVE/XRSTOR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): actualized FOR_READ here, whereas fxrstor above uses
       FOR_CHANGE even though both restore state; presumably iemCImpl_xrstor
       handles the state modification itself - confirm. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    /* FCW/FSW/MXCSR are dirtied by the restore; flag them for the recompiler. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
                        iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9882
/** Opcode 0x0f 0xae mem/6.
 * @todo Stub: XSAVEOPT decoding placeholder, not yet implemented. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9885
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSH support, fall back to the invalid-opcode path that
       still consumes the full ModR/M encoding. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    /* Shared C implementation with clflushopt; may cause a VM-exit. */
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9908
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSHOPT support, fall back to the invalid-opcode path that
       still consumes the full ModR/M encoding. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    /* Same C implementation as plain clflush; may cause a VM-exit. */
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9931
9932
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    /* ARM64 hosts always have a native fence implementation. */
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    /* On x86 hosts, only use the real LFENCE helper when the host has SSE2;
       otherwise fall back to a generic memory fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9951
9952
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    /* ARM64 hosts always have a native fence implementation. */
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    /* On x86 hosts, only use the real MFENCE helper when the host has SSE2;
       otherwise fall back to a generic memory fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9971
9972
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    /* ARM64 hosts always have a native fence implementation. */
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    /* On x86 hosts, only use the real SFENCE helper when the host has SSE2;
       otherwise fall back to a generic memory fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9991
9992
/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    /* Operand size selects whether the full 64-bit base or only the low
       32 bits are read into the destination register. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10020
10021
/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    /* Same structure as rdfsbase, but reading the GS base. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10049
10050
/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A 64-bit base must be canonical, otherwise #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* 32-bit source: zero-extended into the 64-bit base; always
           canonical, so no canonicality check needed here. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10079
10080
/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    /* Same structure as wrfsbase, but writing the GS base. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A 64-bit base must be canonical, otherwise #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* 32-bit source: zero-extended into the 64-bit base. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10109
10110
/**
 * Group 15 jump table for register variant.
 *
 * Indexed by ModR/M /reg * 4 + prefix index (none, 066h, 0f3h, 0f2h).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,               066h,                0f3h,                 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10126
10127
/**
 * Group 15 jump table for memory variant.
 *
 * Indexed by ModR/M /reg * 4 + prefix index (none, 066h, 0f3h, 0f2h).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                066h,                   0f3h,                0f2h */
    /* /0 */ iemOp_Grp15_fxsave,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,  iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10143
10144
10145/** Opcode 0x0f 0xae. */
10146FNIEMOP_DEF(iemOp_Grp15)
10147{
10148 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10150 if (IEM_IS_MODRM_REG_MODE(bRm))
10151 /* register, register */
10152 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10153 + pVCpu->iem.s.idxPrefix], bRm);
10154 /* memory, register */
10155 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10156 + pVCpu->iem.s.idxPrefix], bRm);
10157}
10158
10159
/**
 * @opcode 0xaf
 * @opflclass multiply
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF/ZF/AF/PF are undefined after two-operand IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Pick the helper table matching the target CPU's EFLAGS behavior. */
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
}
10173
10174
/**
 * @opcode 0xb0
 * @opflclass arithmetic
 *
 * CMPXCHG Eb,Gb: compare AL with the destination byte; if equal, store the
 * source byte there, else load the destination byte into AL.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the helper works directly on register refs. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte, run the worker on a local AL
           copy, then commit memory, EFLAGS and AL back.  AL is written back
           unconditionally; on success the worker leaves it unchanged. */
#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            \
            IEM_MC_LOCAL(uint8_t, u8Al); \
            IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
            IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        /* Use the atomic worker for LOCK-prefixed encodings (unless lock is
           being disregarded), the plain RW mapping otherwise. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
        }
    }
}
10241
/**
 * @opcode 0xb1
 * @opflclass arithmetic
 *
 * CMPXCHG Ev,Gv: compare rAX with the destination; if equal, store the
 * source there, else load the destination into rAX.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* A 32-bit write clears the upper half of the 64-bit register:
                   on success (ZF set) the destination was written, on failure
                   EAX was written - clear the matching register's high half. */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
                IEM_MC_ARG(uint64_t, u64Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map the operand, run the worker against a local
           rAX copy, then commit memory + EFLAGS + rAX.  The 16/64-bit paths
           write rAX back unconditionally (the worker leaves it untouched on
           success); the 32-bit path only writes EAX on failure so the
           high-dword clearing matches the register actually written. */
#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
        do { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    \
                    IEM_MC_LOCAL(uint16_t, u16Ax); \
                    IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
                    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    \
                    IEM_MC_LOCAL(uint32_t, u32Eax); \
                    IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
                    IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    \
                    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
                    } IEM_MC_ENDIF(); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    \
                    IEM_MC_LOCAL(uint64_t, u64Rax); \
                    IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
                    IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                    \
                    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } while (0)

        /* Atomic worker for LOCK-prefixed encodings (unless lock is being
           disregarded), plain RW mapping otherwise. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
        }
    }
}
10426
10427
10428/** Opcode 0x0f 0xb2. */
10429FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10430{
10431 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10432 IEMOP_HLP_MIN_386();
10433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10434 if (IEM_IS_MODRM_REG_MODE(bRm))
10435 IEMOP_RAISE_INVALID_OPCODE_RET();
10436 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10437}
10438
10439
/**
 * @opcode 0xb3
 * @oppfx n/a
 * @opflclass bitmap
 *
 * BTR Ev,Gv - bit test and reset.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Shared bit-op body macros emit both the plain and the LOCK-prefixed
       variants, per operand size. */
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10452
10453
10454/** Opcode 0x0f 0xb4. */
10455FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10456{
10457 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10458 IEMOP_HLP_MIN_386();
10459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10460 if (IEM_IS_MODRM_REG_MODE(bRm))
10461 IEMOP_RAISE_INVALID_OPCODE_RET();
10462 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10463}
10464
10465
10466/** Opcode 0x0f 0xb5. */
10467FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10468{
10469 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10470 IEMOP_HLP_MIN_386();
10471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10472 if (IEM_IS_MODRM_REG_MODE(bRm))
10473 IEMOP_RAISE_INVALID_OPCODE_RET();
10474 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10475}
10476
10477
/** Opcode 0x0f 0xb6.
 *
 * MOVZX Gv,Eb - zero-extend a byte from register or memory into a 16/32/64-bit
 * general register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10573
10574
/** Opcode 0x0f 0xb7.
 *
 * MOVZX Gv,Ew - zero-extend a word from register or memory into a 32/64-bit
 * general register (16-bit operand size is folded into the 32-bit path).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */

    /** @todo There should be no difference in the behaviour whether REX.W is
     *        present or not... */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Any operand size other than 64-bit takes the 32-bit path. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10647
10648
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF).
 * UD stub: the instruction only exists in IA-64's x86 emulation. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10651
10652
/**
 * @opcode 0xb8
 * @oppfx 0xf3
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear cf,pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* #UD if POPCNT isn't exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    /* Assembly helper table, only available on x86 hosts with assembly enabled. */
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    /* Portable C fallback table. */
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    /* Pick native vs fallback based on host POPCNT support. */
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
}
10676
10677
/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 *
 * UD1: deliberately undefined opcode; always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
10693
10694
10695/**
10696 * Body for group 8 bit instruction.
10697 */
10698#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10700 \
10701 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10702 { \
10703 /* register destination. */ \
10704 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10705 \
10706 switch (pVCpu->iem.s.enmEffOpSize) \
10707 { \
10708 case IEMMODE_16BIT: \
10709 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10711 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10712 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10713 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10714 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10715 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10716 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10717 \
10718 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10719 IEM_MC_END(); \
10720 break; \
10721 \
10722 case IEMMODE_32BIT: \
10723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10725 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10726 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10727 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10728 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10729 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10730 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10731 \
10732 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10733 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10734 IEM_MC_END(); \
10735 break; \
10736 \
10737 case IEMMODE_64BIT: \
10738 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10740 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10741 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10742 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10743 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10744 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10745 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10746 \
10747 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10748 IEM_MC_END(); \
10749 break; \
10750 \
10751 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10752 } \
10753 } \
10754 else \
10755 { \
10756 /* memory destination. */ \
10757 /** @todo test negative bit offsets! */ \
10758 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10759 { \
10760 switch (pVCpu->iem.s.enmEffOpSize) \
10761 { \
10762 case IEMMODE_16BIT: \
10763 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10766 \
10767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10768 IEMOP_HLP_DONE_DECODING(); \
10769 \
10770 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10771 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10772 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10773 \
10774 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10775 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10776 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10777 \
10778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10779 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10781 IEM_MC_END(); \
10782 break; \
10783 \
10784 case IEMMODE_32BIT: \
10785 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10788 \
10789 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10790 IEMOP_HLP_DONE_DECODING(); \
10791 \
10792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10793 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10794 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10795 \
10796 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10797 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10798 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10799 \
10800 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10801 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10802 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10803 IEM_MC_END(); \
10804 break; \
10805 \
10806 case IEMMODE_64BIT: \
10807 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10810 \
10811 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10812 IEMOP_HLP_DONE_DECODING(); \
10813 \
10814 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10815 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10816 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10817 \
10818 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10819 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10820 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10821 \
10822 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10823 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10824 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10825 IEM_MC_END(); \
10826 break; \
10827 \
10828 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10829 } \
10830 } \
10831 else \
10832 { \
10833 (void)0
10834/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10835#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10836 switch (pVCpu->iem.s.enmEffOpSize) \
10837 { \
10838 case IEMMODE_16BIT: \
10839 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10842 \
10843 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10844 IEMOP_HLP_DONE_DECODING(); \
10845 \
10846 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10847 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10848 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10849 \
10850 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10851 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10852 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10853 \
10854 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10855 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10856 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10857 IEM_MC_END(); \
10858 break; \
10859 \
10860 case IEMMODE_32BIT: \
10861 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10864 \
10865 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10866 IEMOP_HLP_DONE_DECODING(); \
10867 \
10868 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10869 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10870 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10871 \
10872 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10873 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10874 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10875 \
10876 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10877 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10878 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10879 IEM_MC_END(); \
10880 break; \
10881 \
10882 case IEMMODE_64BIT: \
10883 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10886 \
10887 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10888 IEMOP_HLP_DONE_DECODING(); \
10889 \
10890 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10891 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10892 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10893 \
10894 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10895 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10896 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10897 \
10898 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10899 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10900 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10901 IEM_MC_END(); \
10902 break; \
10903 \
10904 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10905 } \
10906 } \
10907 } \
10908 (void)0
10909
/* Read-only version for BT, which only tests the bit and never writes the operand (LOCK prefix raises \#UD). */
10911#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10912 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10913 \
10914 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10915 { \
10916 /* register destination. */ \
10917 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10918 \
10919 switch (pVCpu->iem.s.enmEffOpSize) \
10920 { \
10921 case IEMMODE_16BIT: \
10922 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10924 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10925 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10926 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10927 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10928 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10929 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10930 \
10931 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10932 IEM_MC_END(); \
10933 break; \
10934 \
10935 case IEMMODE_32BIT: \
10936 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10938 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10939 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10940 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10941 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10942 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10943 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10944 \
10945 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10946 IEM_MC_END(); \
10947 break; \
10948 \
10949 case IEMMODE_64BIT: \
10950 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10952 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10953 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10954 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10955 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10956 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10957 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10958 \
10959 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10960 IEM_MC_END(); \
10961 break; \
10962 \
10963 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10964 } \
10965 } \
10966 else \
10967 { \
10968 /* memory destination. */ \
10969 /** @todo test negative bit offsets! */ \
10970 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10971 { \
10972 switch (pVCpu->iem.s.enmEffOpSize) \
10973 { \
10974 case IEMMODE_16BIT: \
10975 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10978 \
10979 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10980 IEMOP_HLP_DONE_DECODING(); \
10981 \
10982 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10983 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10984 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10985 \
10986 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10987 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10988 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10989 \
10990 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10991 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10992 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10993 IEM_MC_END(); \
10994 break; \
10995 \
10996 case IEMMODE_32BIT: \
10997 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11000 \
11001 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11002 IEMOP_HLP_DONE_DECODING(); \
11003 \
11004 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11005 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
11006 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11007 \
11008 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11009 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
11010 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11011 \
11012 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11013 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11015 IEM_MC_END(); \
11016 break; \
11017 \
11018 case IEMMODE_64BIT: \
11019 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11022 \
11023 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11024 IEMOP_HLP_DONE_DECODING(); \
11025 \
11026 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11027 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
11028 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11029 \
11030 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11031 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
11032 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11033 \
11034 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11035 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11036 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11037 IEM_MC_END(); \
11038 break; \
11039 \
11040 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11041 } \
11042 } \
11043 else \
11044 { \
11045 IEMOP_HLP_DONE_DECODING(); \
11046 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11047 } \
11048 } \
11049 (void)0
11050
11051
11052/**
11053 * @opmaps grp8
11054 * @opcode /4
11055 * @oppfx n/a
11056 * @opflclass bitmap
11057 */
11058FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11059{
11060 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11061 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11062}
11063
11064
11065/**
11066 * @opmaps grp8
11067 * @opcode /5
11068 * @oppfx n/a
11069 * @opflclass bitmap
11070 */
11071FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11072{
11073 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11074 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11075 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11076}
11077
11078
11079/**
11080 * @opmaps grp8
11081 * @opcode /6
11082 * @oppfx n/a
11083 * @opflclass bitmap
11084 */
11085FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11086{
11087 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11088 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11089 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11090}
11091
11092
11093/**
11094 * @opmaps grp8
11095 * @opcode /7
11096 * @oppfx n/a
11097 * @opflclass bitmap
11098 */
11099FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11100{
11101 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11102 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11103 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11104}
11105
11106
/** Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib dispatcher (reg field selects the instruction). */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib,  bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11126
11127
11128/**
11129 * @opcode 0xbb
11130 * @oppfx n/a
11131 * @opflclass bitmap
11132 */
11133FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11134{
11135 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11136 IEMOP_HLP_MIN_386();
11137 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11138 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11139}
11140
11141
11142/**
11143 * Body for BSF and BSR instructions.
11144 *
11145 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11146 * the destination register, which means that for 32-bit operations the high
11147 * bits must be left alone.
11148 *
11149 * @param pImpl Pointer to the instruction implementation (assembly).
11150 */
11151#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11153 \
11154 /* \
11155 * If rm is denoting a register, no more instruction bytes. \
11156 */ \
11157 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11158 { \
11159 switch (pVCpu->iem.s.enmEffOpSize) \
11160 { \
11161 case IEMMODE_16BIT: \
11162 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11164 \
11165 IEM_MC_ARG(uint16_t, u16Src, 2); \
11166 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11167 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11168 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11169 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11170 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11171 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11172 \
11173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11174 IEM_MC_END(); \
11175 break; \
11176 \
11177 case IEMMODE_32BIT: \
11178 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11180 \
11181 IEM_MC_ARG(uint32_t, u32Src, 2); \
11182 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11183 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11184 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11185 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11186 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11187 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11188 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11189 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11190 } IEM_MC_ENDIF(); \
11191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11192 IEM_MC_END(); \
11193 break; \
11194 \
11195 case IEMMODE_64BIT: \
11196 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11198 \
11199 IEM_MC_ARG(uint64_t, u64Src, 2); \
11200 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11201 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11202 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11203 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11204 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11205 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11206 \
11207 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11208 IEM_MC_END(); \
11209 break; \
11210 \
11211 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11212 } \
11213 } \
11214 else \
11215 { \
11216 /* \
11217 * We're accessing memory. \
11218 */ \
11219 switch (pVCpu->iem.s.enmEffOpSize) \
11220 { \
11221 case IEMMODE_16BIT: \
11222 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11226 \
11227 IEM_MC_ARG(uint16_t, u16Src, 2); \
11228 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11229 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11230 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11231 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11232 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11233 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11234 \
11235 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11236 IEM_MC_END(); \
11237 break; \
11238 \
11239 case IEMMODE_32BIT: \
11240 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11244 \
11245 IEM_MC_ARG(uint32_t, u32Src, 2); \
11246 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11247 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11248 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11249 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11250 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11251 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11252 \
11253 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11254 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11255 } IEM_MC_ENDIF(); \
11256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11257 IEM_MC_END(); \
11258 break; \
11259 \
11260 case IEMMODE_64BIT: \
11261 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11265 \
11266 IEM_MC_ARG(uint64_t, u64Src, 2); \
11267 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11268 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11269 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11270 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11271 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11272 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11273 \
11274 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11275 IEM_MC_END(); \
11276 break; \
11277 \
11278 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11279 } \
11280 } (void)0
11281
11282
11283/**
11284 * @opcode 0xbc
11285 * @oppfx !0xf3
11286 * @opfltest cf,pf,af,sf,of
11287 * @opflmodify cf,pf,af,zf,sf,of
11288 * @opflundef cf,pf,af,sf,of
11289 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11290 * document them as inputs. Sigh.
11291 */
11292FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11293{
11294 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11295 IEMOP_HLP_MIN_386();
11296 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11297 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11298 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11299}
11300
11301
11302/**
11303 * @opcode 0xbc
11304 * @oppfx 0xf3
11305 * @opfltest pf,af,sf,of
11306 * @opflmodify cf,pf,af,zf,sf,of
11307 * @opflundef pf,af,sf,of
11308 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11309 * document them as inputs. Sigh.
11310 */
11311FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11312{
11313 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11314 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11315 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11316
11317#ifndef TST_IEM_CHECK_MC
11318 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11319 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11320 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11321 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11322 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11323 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11324 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11325 {
11326 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11327 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11328 };
11329#endif
11330 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11331 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11332 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11334 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11335}
11336
11337
11338/**
11339 * @opcode 0xbd
11340 * @oppfx !0xf3
11341 * @opfltest cf,pf,af,sf,of
11342 * @opflmodify cf,pf,af,zf,sf,of
11343 * @opflundef cf,pf,af,sf,of
11344 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11345 * document them as inputs. Sigh.
11346 */
11347FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11348{
11349 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11350 IEMOP_HLP_MIN_386();
11351 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11352 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11353 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11354}
11355
11356
11357/**
11358 * @opcode 0xbd
11359 * @oppfx 0xf3
11360 * @opfltest pf,af,sf,of
11361 * @opflmodify cf,pf,af,zf,sf,of
11362 * @opflundef pf,af,sf,of
11363 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11364 * document them as inputs. Sigh.
11365 */
11366FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11367{
11368 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11369 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11370 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11371
11372#ifndef TST_IEM_CHECK_MC
11373 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11374 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11375 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11376 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11377 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11378 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11379 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11380 {
11381 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11382 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11383 };
11384#endif
11385 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11386 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11387 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11389 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11390}
11391
11392
11393
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb: sign-extend a byte register/memory operand into a 16/32/64-bit GPR. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: fetch-with-sign-extend then store at the effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11489
11490
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew: sign-extend a word register/memory operand into a 32/64-bit GPR. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: only two destination widths matter (16-bit dst is handled as 32-bit here). */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11559
11560
/**
 * @opcode 0xc0
 * @opflclass arithmetic
 *
 * XADD Eb,Gb: exchange the two byte operands, then store their sum in the
 * destination (Eb); the arithmetic flags reflect the addition.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486(); /* XADD was introduced with the 80486. */
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: LOCK prefix is invalid here. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Shared body: maps the byte at Eb, snapshots Gb, calls the worker,
           then commits memory, EFLAGS and the updated register copy. */
#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_LOCAL(uint8_t, u8RegCopy); \
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
        /* Use the atomic worker when LOCK is present (unless configured to disregard it). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
        }
        else
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
        }
    }
}
11627
11628
/**
 * @opcode 0xc1
 * @opflclass arithmetic
 *
 * XADD Ev,Gv: exchange the two operands, then store their sum in the
 * destination (Ev); decodes 16/32/64-bit variants by effective operand size.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486(); /* XADD was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both operand registers were written, so clear the upper
                   halves of both (32-bit GPR write semantics in 64-bit mode). */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Shared body for all three operand sizes: maps Ev, snapshots Gv,
           calls the worker, then commits memory, EFLAGS and the register. */
#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint16_t, u16RegCopy); \
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint32_t, u32RegCopy); \
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint64_t, u64RegCopy); \
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)

        /* Use the atomic workers when LOCK is present (unless configured to disregard it). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
        }
        else
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
        }
    }
}
11794
11795
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib
 *
 * Packed single-precision FP compare; the imm8 selects the comparison
 * predicate and the full 128-bit mask result is written back to Vps.
 * Requires SSE.
 */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Worker takes both operands packed into one source structure. */
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (SSE alignment check). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11851
11852
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib
 *
 * Packed double-precision FP compare; the imm8 selects the comparison
 * predicate and the full 128-bit mask result is written back to Vpd.
 * Requires SSE2.
 */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Worker takes both operands packed into one source structure. */
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (SSE alignment check). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11908
11909
11910/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11911FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11912{
11913 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11914
11915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11916 if (IEM_IS_MODRM_REG_MODE(bRm))
11917 {
11918 /*
11919 * XMM32, XMM32.
11920 */
11921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11922 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11924 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11925 IEM_MC_LOCAL(X86XMMREG, Dst);
11926 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11927 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11928 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11930 IEM_MC_PREPARE_SSE_USAGE();
11931 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11932 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11933 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11934
11935 IEM_MC_ADVANCE_RIP_AND_FINISH();
11936 IEM_MC_END();
11937 }
11938 else
11939 {
11940 /*
11941 * XMM32, [mem32].
11942 */
11943 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11944 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11945 IEM_MC_LOCAL(X86XMMREG, Dst);
11946 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11947 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11949
11950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11951 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11952 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11955 IEM_MC_PREPARE_SSE_USAGE();
11956
11957 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11958 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11959 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11960 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11961
11962 IEM_MC_ADVANCE_RIP_AND_FINISH();
11963 IEM_MC_END();
11964 }
11965}
11966
11967
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib
 *
 * Scalar double-precision FP compare; the imm8 selects the comparison
 * predicate and only the low qword of Vsd receives the mask result.
 * Requires SSE2.
 */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Worker takes both operands packed into one source structure. */
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
        /* Scalar form: only the low qword of the destination is updated. */
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Scalar memory operand: no alignment restriction (64-bit access). */
        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12024
12025
/** Opcode 0x0f 0xc3.
 *
 * MOVNTI My,Gy: non-temporal store of a 32/64-bit GPR to memory.  Only the
 * register-to-memory form is defined; the register-destination form and the
 * 16-bit operand size raise \#UD.  Requires SSE2.  (The non-temporal hint
 * itself is not modelled here - the store is performed as a normal write.)
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12076
12077
12078/* Opcode 0x66 0x0f 0xc3 - invalid */
12079/* Opcode 0xf3 0x0f 0xc3 - invalid */
12080/* Opcode 0xf2 0x0f 0xc3 - invalid */
12081
12082
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib
 *
 * MMX form: inserts a 16-bit value from a GPR or memory into the MMX
 * register word selected by imm8 bits [1:0].  Requires SSE or the AMD
 * MMX extensions.
 */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue); /* imm8[1:0] selects the word. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        /* Switch to MMX mode only after the memory access succeeded. */
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12131
12132
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib
 *
 * SSE2 form: inserts a 16-bit value from a GPR or memory into the XMM
 * register word selected by imm8 bits [2:0].
 */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue); /* imm8[2:0] selects the word. */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12177
12178
12179/* Opcode 0xf3 0x0f 0xc4 - invalid */
12180/* Opcode 0xf2 0x0f 0xc4 - invalid */
12181
12182
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib
 *
 * MMX form: zero-extends the MMX register word selected by imm8 bits [1:0]
 * into a 32-bit GPR.  Register source only; the memory form raises \#UD.
 * Requires SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3); /* imm8[1:0] selects the word. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12209
12210
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib
 *
 * SSE2 form: zero-extends the XMM register word selected by imm8 bits [2:0]
 * into a 32-bit GPR.  Register source only; the memory form raises \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7); /* imm8[2:0] selects the word. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12236
12237
12238/* Opcode 0xf3 0x0f 0xc5 - invalid */
12239/* Opcode 0xf2 0x0f 0xc5 - invalid */
12240
12241
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib
 *
 * Shuffles packed single-precision values from Vps/Wps into Vps according
 * to the imm8 selector.  Requires SSE.
 */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg); /* Worker writes Vps in place. */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Memory operand must be 16-byte aligned (SSE alignment check). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12292
12293
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 *
 * Shuffles packed double-precision values from Vpd/Wpd into Vpd according
 * to the imm8 selector.  Requires SSE2.
 */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg); /* Worker writes Vpd in place. */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* 1 = imm8 still to be fetched. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Memory operand must be 16-byte aligned (SSE alignment check). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12344
12345
12346/* Opcode 0xf3 0x0f 0xc6 - invalid */
12347/* Opcode 0xf2 0x0f 0xc6 - invalid */
12348
12349
/**
 * @opmaps grp9
 * @opcode /1
 * @opcodesub !11 mr/reg rex.w=0
 * @oppfx n/a
 * @opflmodify zf
 *
 * CMPXCHG8B Mq: compares EDX:EAX with the 64-bit memory operand; on match
 * stores ECX:EBX there and sets ZF, otherwise loads the memory value into
 * EDX:EAX and clears ZF.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
    /* Shared body: maps the qword destination, gathers EDX:EAX and ECX:EBX,
       calls the worker, then commits memory/EFLAGS and - when ZF is clear
       (comparison failed) - the updated EDX:EAX pair. */
#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
    \
    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
    IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
    \
    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
    \
    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
    IEM_MC_COMMIT_EFLAGS(EFlags); \
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
        IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    \
    IEM_MC_END()
    /* Use the atomic worker when LOCK is present (unless configured to disregard it). */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
    }
    else
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
    }
}
12398
12399
12400/**
12401 * @opmaps grp9
12402 * @opcode /1
12403 * @opcodesub !11 mr/reg rex.w=1
12404 * @oppfx n/a
12405 * @opflmodify zf
12406 */
12407FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12408{
12409 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12410 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12411 {
12412 /*
12413 * This is hairy, very hairy macro fun. We're walking a fine line
12414 * here to make the code parsable by IEMAllInstPython.py and fit into
12415 * the patterns IEMAllThrdPython.py requires for the code morphing.
12416 */
12417#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12421 IEMOP_HLP_DONE_DECODING(); \
12422 \
12423 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12424 bUnmapInfoStmt; \
12425 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12426 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12427 \
12428 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12429 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12430 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12431 \
12432 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12433 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12434 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12435 \
12436 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12437
12438#define BODY_CMPXCHG16B_TAIL(a_Type) \
12439 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12440 IEM_MC_COMMIT_EFLAGS(EFlags); \
12441 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12442 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12443 } IEM_MC_ENDIF(); \
12444 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12445 IEM_MC_END()
12446
12447#ifdef RT_ARCH_AMD64
12448 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12449 {
12450 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12451 {
12452 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12453 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12454 BODY_CMPXCHG16B_TAIL(RW);
12455 }
12456 else
12457 {
12458 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12459 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12460 BODY_CMPXCHG16B_TAIL(ATOMIC);
12461 }
12462 }
12463 else
12464 { /* (see comments in #else case below) */
12465 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12466 {
12467 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12469 BODY_CMPXCHG16B_TAIL(RW);
12470 }
12471 else
12472 {
12473 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12474 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12475 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12476 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12477 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12478 pEFlags, bUnmapInfo);
12479 IEM_MC_END();
12480 }
12481 }
12482
12483#elif defined(RT_ARCH_ARM64)
12484 /** @todo may require fallback for unaligned accesses... */
12485 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12486 {
12487 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12489 BODY_CMPXCHG16B_TAIL(RW);
12490 }
12491 else
12492 {
12493 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12494 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12495 BODY_CMPXCHG16B_TAIL(ATOMIC);
12496 }
12497
12498#else
12499 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12500 accesses and not all all atomic, which works fine on in UNI CPU guest
12501 configuration (ignoring DMA). If guest SMP is active we have no choice
12502 but to use a rendezvous callback here. Sigh. */
12503 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12504 {
12505 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12507 BODY_CMPXCHG16B_TAIL(RW);
12508 }
12509 else
12510 {
12511 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12512 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12513 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12515 iemCImpl_cmpxchg16b_fallback_rendezvous,
12516 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12517 IEM_MC_END();
12518 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12519 }
12520#endif
12521
12522#undef BODY_CMPXCHG16B
12523 }
12524 Log(("cmpxchg16b -> #UD\n"));
12525 IEMOP_RAISE_INVALID_OPCODE_RET();
12526}
12527
/** Opcode 0x0f 0xc7 !11/1 - dispatches between CMPXCHG8B and CMPXCHG16B. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    /* REX.W selects the 16-byte (RDX:RAX/RCX:RBX) variant. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}
12534
12535
/** Opcode 0x0f 0xc7 11/6 - RDRAND Rv. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    /* #UD unless the guest CPU profile advertises RDRAND. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination; work is done by the C implementation
           (affects RFLAGS and may cause a VM-exit). */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdrand, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12557
/** Opcode 0x0f 0xc7 !11/6 - VMPTRLD Mq (loads the current-VMCS pointer). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12576
/** Opcode 0x66 0x0f 0xc7 !11/6 - VMCLEAR Mq (clears a VMCS). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12595
/** Opcode 0xf3 0x0f 0xc7 !11/6 - VMXON Mq (enters VMX operation). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    /* Note: no IEMOP_HLP_IN_VMX_OPERATION here - VMXON is the instruction
       that enters VMX operation in the first place. */
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12613
/** Opcode [0xf3] 0x0f 0xc7 !11/7 - VMPTRST Mq (stores the current-VMCS pointer). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12632
/** Opcode 0x0f 0xc7 11/7 - RDSEED Rv. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    /* #UD unless the guest CPU profile advertises RDSEED. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdseed, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12655
12656/**
12657 * Group 9 jump table for register variant.
12658 */
12659IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12660{ /* pfx: none, 066h, 0f3h, 0f2h */
12661 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12662 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12663 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12664 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12665 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12666 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12667 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12668 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12669};
12670AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12671
12672
12673/**
12674 * Group 9 jump table for memory variant.
12675 */
12676IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12677{ /* pfx: none, 066h, 0f3h, 0f2h */
12678 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12679 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12680 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12681 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12682 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12683 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12684 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12685 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12686};
12687AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12688
12689
/** Opcode 0x0f 0xc7 - group 9 dispatcher (indexes the tables above by
 *  ModR/M reg field and mandatory-prefix index). */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                            + pVCpu->iem.s.idxPrefix], bRm);
}
12702
12703
12704/**
12705 * Common 'bswap register' helper.
12706 */
12707FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12708{
12709 switch (pVCpu->iem.s.enmEffOpSize)
12710 {
12711 case IEMMODE_16BIT:
12712 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12714 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12715 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12716 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12717 IEM_MC_ADVANCE_RIP_AND_FINISH();
12718 IEM_MC_END();
12719 break;
12720
12721 case IEMMODE_32BIT:
12722 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12724 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12725 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12726 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12727 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12728 IEM_MC_ADVANCE_RIP_AND_FINISH();
12729 IEM_MC_END();
12730 break;
12731
12732 case IEMMODE_64BIT:
12733 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12735 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12736 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12737 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12738 IEM_MC_ADVANCE_RIP_AND_FINISH();
12739 IEM_MC_END();
12740 break;
12741
12742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12743 }
12744}
12745
12746
/** Opcode 0x0f 0xc8 - bswap rAX/r8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12757
12758
/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12766
12767
12768/** Opcode 0x0f 0xca. */
12769FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12770{
12771 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12772 IEMOP_HLP_MIN_486();
12773 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12774}
12775
12776
12777/** Opcode 0x0f 0xcb. */
12778FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12779{
12780 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12781 IEMOP_HLP_MIN_486();
12782 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12783}
12784
12785
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12793
12794
/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12802
12803
/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12811
12812
/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12820
12821
12822/* Opcode 0x0f 0xd0 - invalid */
12823
12824
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd (SSE3; common worker does the
 *  decoding and exception checks). */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
12831
12832
12833/* Opcode 0xf3 0x0f 0xd0 - invalid */
12834
12835
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps (SSE3; common worker does the
 *  decoding and exception checks). */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12842
12843
12844
/** Opcode 0x0f 0xd1 - psrlw Pq, Qq (MMX logical right shift of words). */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}
12851
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx (SSE2 logical right shift of words). */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12858
12859/* Opcode 0xf3 0x0f 0xd1 - invalid */
12860/* Opcode 0xf2 0x0f 0xd1 - invalid */
12861
/** Opcode 0x0f 0xd2 - psrld Pq, Qq (MMX logical right shift of dwords). */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
12868
12869
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx (SSE2 logical right shift of dwords). */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12876
12877
12878/* Opcode 0xf3 0x0f 0xd2 - invalid */
12879/* Opcode 0xf2 0x0f 0xd2 - invalid */
12880
12881/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12882FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12883{
12884 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12885 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12886}
12887
12888
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx (SSE2 logical right shift of qwords). */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12895
12896
12897/* Opcode 0xf3 0x0f 0xd3 - invalid */
12898/* Opcode 0xf2 0x0f 0xd3 - invalid */
12899
12900
/** Opcode 0x0f 0xd4 - paddq Pq, Qq (MMX qword add; the _Sse2 worker variant
 *  is used because PADDQ on MMX registers requires SSE2). */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}
12907
12908
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx (SSE2 qword add; inline body macro
 *  with native codegen enabled for both AMD64 and ARM64 hosts). */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
12915
12916
12917/* Opcode 0xf3 0x0f 0xd4 - invalid */
12918/* Opcode 0xf2 0x0f 0xd4 - invalid */
12919
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq (MMX low word multiply). */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
}
12926
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx (SSE2 low word multiply; inline
 *  body macro with native codegen for AMD64 and ARM64 hosts). */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
12933
12934
12935/* Opcode 0xf3 0x0f 0xd5 - invalid */
12936/* Opcode 0xf2 0x0f 0xd5 - invalid */
12937
12938/* Opcode 0x0f 0xd6 - invalid */
12939
12940/**
12941 * @opcode 0xd6
12942 * @oppfx 0x66
12943 * @opcpuid sse2
12944 * @opgroup og_sse2_pcksclr_datamove
12945 * @opxcpttype none
12946 * @optest op1=-1 op2=2 -> op1=2
12947 * @optest op1=0 op2=-42 -> op1=-42
12948 */
12949FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12950{
12951 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12953 if (IEM_IS_MODRM_REG_MODE(bRm))
12954 {
12955 /*
12956 * Register, register.
12957 */
12958 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12960 IEM_MC_LOCAL(uint64_t, uSrc);
12961
12962 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12963 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12964
12965 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12966 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12967
12968 IEM_MC_ADVANCE_RIP_AND_FINISH();
12969 IEM_MC_END();
12970 }
12971 else
12972 {
12973 /*
12974 * Memory, register.
12975 */
12976 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12977 IEM_MC_LOCAL(uint64_t, uSrc);
12978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12979
12980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12984
12985 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12986 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12987
12988 IEM_MC_ADVANCE_RIP_AND_FINISH();
12989 IEM_MC_END();
12990 }
12991}
12992
12993
12994/**
12995 * @opcode 0xd6
12996 * @opcodesub 11 mr/reg
12997 * @oppfx f3
12998 * @opcpuid sse2
12999 * @opgroup og_sse2_simdint_datamove
13000 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13001 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13002 */
13003FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13004{
13005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13006 if (IEM_IS_MODRM_REG_MODE(bRm))
13007 {
13008 /*
13009 * Register, register.
13010 */
13011 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13012 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13014 IEM_MC_LOCAL(uint64_t, uSrc);
13015
13016 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13017 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13018 IEM_MC_FPU_TO_MMX_MODE();
13019
13020 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13021 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13022
13023 IEM_MC_ADVANCE_RIP_AND_FINISH();
13024 IEM_MC_END();
13025 }
13026
13027 /**
13028 * @opdone
13029 * @opmnemonic udf30fd6mem
13030 * @opcode 0xd6
13031 * @opcodesub !11 mr/reg
13032 * @oppfx f3
13033 * @opunused intel-modrm
13034 * @opcpuid sse
13035 * @optest ->
13036 */
13037 else
13038 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13039}
13040
13041
13042/**
13043 * @opcode 0xd6
13044 * @opcodesub 11 mr/reg
13045 * @oppfx f2
13046 * @opcpuid sse2
13047 * @opgroup og_sse2_simdint_datamove
13048 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13049 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13050 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13051 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13052 * @optest op1=-42 op2=0xfedcba9876543210
13053 * -> op1=0xfedcba9876543210 ftw=0xff
13054 */
13055FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13056{
13057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13058 if (IEM_IS_MODRM_REG_MODE(bRm))
13059 {
13060 /*
13061 * Register, register.
13062 */
13063 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13064 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13066 IEM_MC_LOCAL(uint64_t, uSrc);
13067
13068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13069 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13070 IEM_MC_FPU_TO_MMX_MODE();
13071
13072 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13073 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13074
13075 IEM_MC_ADVANCE_RIP_AND_FINISH();
13076 IEM_MC_END();
13077 }
13078
13079 /**
13080 * @opdone
13081 * @opmnemonic udf20fd6mem
13082 * @opcode 0xd6
13083 * @opcodesub !11 mr/reg
13084 * @oppfx f2
13085 * @opunused intel-modrm
13086 * @opcpuid sse
13087 * @optest ->
13088 */
13089 else
13090 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13091}
13092
13093
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq (MMX byte sign-mask extraction;
 *  requires SSE or the AMD MMX extensions). */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13121
13122
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux (SSE2 byte sign-mask extraction;
 *  has a native recompiler path for AMD64 and ARM64 hosts). */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
            IEM_MC_LIVENESS_GREG_CLOBBER(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_LIVENESS_XREG_INPUT(IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        } IEM_MC_NATIVE_ELSE() {
            /* Fallback: call the assembly helper. */
            IEM_MC_ARG(uint64_t *, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        } IEM_MC_NATIVE_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13153
13154
13155/* Opcode 0xf3 0x0f 0xd7 - invalid */
13156/* Opcode 0xf2 0x0f 0xd7 - invalid */
13157
13158
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq (MMX unsigned saturating byte subtract). */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
}
13165
13166
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx (SSE2 unsigned saturating byte subtract). */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
}
13173
13174
13175/* Opcode 0xf3 0x0f 0xd8 - invalid */
13176/* Opcode 0xf2 0x0f 0xd8 - invalid */
13177
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq (MMX unsigned saturating word subtract). */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
}
13184
13185
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx (SSE2 unsigned saturating word subtract). */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
}
13192
13193
13194/* Opcode 0xf3 0x0f 0xd9 - invalid */
13195/* Opcode 0xf2 0x0f 0xd9 - invalid */
13196
/** Opcode 0x0f 0xda - pminub Pq, Qq (MMX unsigned byte minimum; uses the
 *  MmxSse worker since the MMX form requires SSE/AMD MMX extensions). */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
}
13203
13204
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx (SSE2 unsigned byte minimum). */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
}
13211
13212/* Opcode 0xf3 0x0f 0xda - invalid */
13213/* Opcode 0xf2 0x0f 0xda - invalid */
13214
/** Opcode 0x0f 0xdb - pand Pq, Qq (MMX bitwise AND). */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
}
13221
13222
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx (SSE2 bitwise AND; inline body macro
 *  with native codegen for AMD64 and ARM64 hosts). */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13229
13230
13231/* Opcode 0xf3 0x0f 0xdb - invalid */
13232/* Opcode 0xf2 0x0f 0xdb - invalid */
13233
/** Opcode 0x0f 0xdc - paddusb Pq, Qq (MMX unsigned saturating byte add). */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
}
13240
13241
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx (SSE2 unsigned saturating byte
 *  add; inline body macro with native codegen for AMD64/ARM64 hosts). */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13248
13249
13250/* Opcode 0xf3 0x0f 0xdc - invalid */
13251/* Opcode 0xf2 0x0f 0xdc - invalid */
13252
/** Opcode 0x0f 0xdd - paddusw Pq, Qq (MMX unsigned saturating word add). */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
}
13259
13260
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx (SSE2 unsigned saturating word
 *  add; inline body macro with native codegen for AMD64/ARM64 hosts). */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13267
13268
13269/* Opcode 0xf3 0x0f 0xdd - invalid */
13270/* Opcode 0xf2 0x0f 0xdd - invalid */
13271
/** Opcode 0x0f 0xde - pmaxub Pq, Qq (MMX unsigned byte maximum; uses the
 *  MmxSse worker since the MMX form requires SSE/AMD MMX extensions). */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
}
13278
13279
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx (SSE2 unsigned byte maximum). */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
13286
13287/* Opcode 0xf3 0x0f 0xde - invalid */
13288/* Opcode 0xf2 0x0f 0xde - invalid */
13289
13290
/** Opcode 0x0f 0xdf - pandn Pq, Qq (MMX bitwise AND-NOT). */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
}
13297
13298
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx (SSE2 bitwise AND-NOT). */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}
13305
13306
13307/* Opcode 0xf3 0x0f 0xdf - invalid */
13308/* Opcode 0xf2 0x0f 0xdf - invalid */
13309
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq (MMX byte average; uses the MmxSse
 *  worker since the MMX form requires SSE/AMD MMX extensions). */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}
13316
13317
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx (SSE2 byte average). */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
13324
13325
13326/* Opcode 0xf3 0x0f 0xe0 - invalid */
13327/* Opcode 0xf2 0x0f 0xe0 - invalid */
13328
/** Opcode 0x0f 0xe1 - psraw Pq, Qq (MMX arithmetic right shift of words). */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}
13335
13336
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx (SSE2 arithmetic right shift of words). */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
13343
13344
13345/* Opcode 0xf3 0x0f 0xe1 - invalid */
13346/* Opcode 0xf2 0x0f 0xe1 - invalid */
13347
/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSRAD implementation. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}
13354
13355
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSRAD implementation. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
13362
13363
13364/* Opcode 0xf3 0x0f 0xe2 - invalid */
13365/* Opcode 0xf2 0x0f 0xe2 - invalid */
13366
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    /* Uses the MmxSse worker variant (MMX encoding introduced with SSE - presumably checked there). */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}
13373
13374
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PAVGW implementation. */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
13381
13382
13383/* Opcode 0xf3 0x0f 0xe3 - invalid */
13384/* Opcode 0xf2 0x0f 0xe3 - invalid */
13385
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    /* Uses the MmxSse worker variant (MMX encoding introduced with SSE - presumably checked there). */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}
13392
13393
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMULHUW implementation. */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}
13400
13401
13402/* Opcode 0xf3 0x0f 0xe4 - invalid */
13403/* Opcode 0xf2 0x0f 0xe4 - invalid */
13404
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PMULHW implementation. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
}
13411
13412
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMULHW implementation. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
13419
13420
13421/* Opcode 0xf3 0x0f 0xe5 - invalid */
13422/* Opcode 0xf2 0x0f 0xe5 - invalid */
13423/* Opcode 0x0f 0xe6 - invalid */
13424
13425
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    /* SSE2 conversion (truncating): dispatched via the common SSE2 floating-point reg/mem worker. */
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}
13432
13433
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    /* SSE2 conversion: dispatched via the common SSE2 floating-point reg/mem worker. */
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}
13440
13441
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    /* SSE2 conversion (rounding per MXCSR): dispatched via the common SSE2 floating-point reg/mem worker. */
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13448
13449
13450/**
13451 * @opcode 0xe7
13452 * @opcodesub !11 mr/reg
13453 * @oppfx none
13454 * @opcpuid sse
13455 * @opgroup og_sse1_cachect
13456 * @opxcpttype none
13457 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13458 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13459 */
13460FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13461{
13462 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13464 if (IEM_IS_MODRM_MEM_MODE(bRm))
13465 {
13466 /* Register, memory. */
13467 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13468 IEM_MC_LOCAL(uint64_t, uSrc);
13469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13470
13471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13473 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13474 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13475 IEM_MC_FPU_TO_MMX_MODE();
13476
13477 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13478 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13479
13480 IEM_MC_ADVANCE_RIP_AND_FINISH();
13481 IEM_MC_END();
13482 }
13483 /**
13484 * @opdone
13485 * @opmnemonic ud0fe7reg
13486 * @opcode 0xe7
13487 * @opcodesub 11 mr/reg
13488 * @oppfx none
13489 * @opunused immediate
13490 * @opcpuid sse
13491 * @optest ->
13492 */
13493 else
13494 IEMOP_RAISE_INVALID_OPCODE_RET();
13495}
13496
13497/**
13498 * @opcode 0xe7
13499 * @opcodesub !11 mr/reg
13500 * @oppfx 0x66
13501 * @opcpuid sse2
13502 * @opgroup og_sse2_cachect
13503 * @opxcpttype 1
13504 * @optest op1=-1 op2=2 -> op1=2
13505 * @optest op1=0 op2=-42 -> op1=-42
13506 */
13507FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13508{
13509 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13511 if (IEM_IS_MODRM_MEM_MODE(bRm))
13512 {
13513 /* Register, memory. */
13514 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13515 IEM_MC_LOCAL(RTUINT128U, uSrc);
13516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13517
13518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13522
13523 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13524 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13525
13526 IEM_MC_ADVANCE_RIP_AND_FINISH();
13527 IEM_MC_END();
13528 }
13529
13530 /**
13531 * @opdone
13532 * @opmnemonic ud660fe7reg
13533 * @opcode 0xe7
13534 * @opcodesub 11 mr/reg
13535 * @oppfx 0x66
13536 * @opunused immediate
13537 * @opcpuid sse
13538 * @optest ->
13539 */
13540 else
13541 IEMOP_RAISE_INVALID_OPCODE_RET();
13542}
13543
13544/* Opcode 0xf3 0x0f 0xe7 - invalid */
13545/* Opcode 0xf2 0x0f 0xe7 - invalid */
13546
13547
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSUBSB implementation. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
}
13554
13555
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSUBSB implementation. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
}
13562
13563
13564/* Opcode 0xf3 0x0f 0xe8 - invalid */
13565/* Opcode 0xf2 0x0f 0xe8 - invalid */
13566
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSUBSW implementation. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}
13573
13574
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSUBSW implementation. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13581
13582
13583/* Opcode 0xf3 0x0f 0xe9 - invalid */
13584/* Opcode 0xf2 0x0f 0xe9 - invalid */
13585
13586
/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    /* Uses the MmxSse worker variant (MMX encoding introduced with SSE - presumably checked there). */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}
13593
13594
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMINSW implementation. */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}
13601
13602
13603/* Opcode 0xf3 0x0f 0xea - invalid */
13604/* Opcode 0xf2 0x0f 0xea - invalid */
13605
13606
/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit POR implementation. */
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}
13613
13614
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13621
13622
13623/* Opcode 0xf3 0x0f 0xeb - invalid */
13624/* Opcode 0xf2 0x0f 0xeb - invalid */
13625
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PADDSB implementation. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}
13632
13633
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PADDSB implementation. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}
13640
13641
13642/* Opcode 0xf3 0x0f 0xec - invalid */
13643/* Opcode 0xf2 0x0f 0xec - invalid */
13644
/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PADDSW implementation. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}
13651
13652
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PADDSW implementation. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13659
13660
13661/* Opcode 0xf3 0x0f 0xed - invalid */
13662/* Opcode 0xf2 0x0f 0xed - invalid */
13663
13664
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    /* Uses the MmxSse worker variant (MMX encoding introduced with SSE - presumably checked there). */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}
13671
13672
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMAXSW implementation. */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
13679
13680
13681/* Opcode 0xf3 0x0f 0xee - invalid */
13682/* Opcode 0xf2 0x0f 0xee - invalid */
13683
13684
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PXOR implementation. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}
13691
13692
/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13699
13700
13701/* Opcode 0xf3 0x0f 0xef - invalid */
13702/* Opcode 0xf2 0x0f 0xef - invalid */
13703
13704/* Opcode 0x0f 0xf0 - invalid */
13705/* Opcode 0x66 0x0f 0xf0 - invalid */
13706
13707
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then finish decoding (fSse3 required, LOCK prefix rejected). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* NO_AC variant: unaligned 128-bit fetch, matching LDDQU's relaxed alignment semantics. */
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13740
13741
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSLLW implementation. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}
13748
13749
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSLLW implementation. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
13756
13757
13758/* Opcode 0xf2 0x0f 0xf1 - invalid */
13759
/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSLLD implementation. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}
13766
13767
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSLLD implementation. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}
13774
13775
13776/* Opcode 0xf2 0x0f 0xf2 - invalid */
13777
/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSLLQ implementation. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}
13784
13785
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSLLQ implementation. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
13792
13793/* Opcode 0xf2 0x0f 0xf3 - invalid */
13794
13795/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13796FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13797{
13798 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13799 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
13800}
13801
13802
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMULUDQ implementation. */
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
13809
13810
13811/* Opcode 0xf2 0x0f 0xf4 - invalid */
13812
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PMADDWD implementation. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}
13819
13820
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PMADDWD implementation. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
13827
13828/* Opcode 0xf2 0x0f 0xf5 - invalid */
13829
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    /* Uses the MmxSse worker variant (MMX encoding introduced with SSE - presumably checked there). */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}
13836
13837
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    /* Decode the mnemonic, then dispatch to the common SSE2 reg/mem worker with the 128-bit PSADBW implementation. */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
13844
13845
13846/* Opcode 0xf2 0x0f 0xf6 - invalid */
13847
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
{
// IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* Accepted with either SSE or the AMD MMX extensions. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL( uint64_t, u64EffAddr);
        IEM_MC_LOCAL( uint64_t, u64Mem);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
        IEM_MC_ARG( uint64_t const *, puSrc, 1);
        IEM_MC_ARG( uint64_t const *, puMsk, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Implicit destination is [rDI] in the effective segment: the quadword there is
           fetched, merged by the helper with the source under the mask (presumably a
           byte-granular select - see iemAImpl_maskmovq_u64), then written back. */
        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13885
13886
13887/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13888FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
13889{
13890// IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13892 if (IEM_IS_MODRM_REG_MODE(bRm))
13893 {
13894 /*
13895 * XMM, XMM, (implicit) [ ER]DI
13896 */
13897 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13899 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13900 IEM_MC_LOCAL( RTUINT128U, u128Mem);
13901 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
13902 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
13903 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
13904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13905 IEM_MC_PREPARE_SSE_USAGE();
13906
13907 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13908 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13909 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13910 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
13911 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
13912 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
13913
13914 IEM_MC_ADVANCE_RIP_AND_FINISH();
13915 IEM_MC_END();
13916 }
13917 else
13918 {
13919 /* The memory, register encoding is invalid. */
13920 IEMOP_RAISE_INVALID_OPCODE_RET();
13921 }
13922}
13923
13924
13925/* Opcode 0xf2 0x0f 0xf7 - invalid */
13926
13927
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSUBB implementation. */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}
13934
13935
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13942
13943
13944/* Opcode 0xf2 0x0f 0xf8 - invalid */
13945
13946
/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSUBW implementation. */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}
13953
13954
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13961
13962
13963/* Opcode 0xf2 0x0f 0xf9 - invalid */
13964
13965
/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PSUBD implementation. */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}
13972
13973
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13980
13981
13982/* Opcode 0xf2 0x0f 0xfa - invalid */
13983
13984
/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    /* MMX-register form of PSUBQ arrived with SSE2, hence the _Sse2 worker variant. */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}
13991
13992
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
13999
14000
14001/* Opcode 0xf2 0x0f 0xfb - invalid */
14002
14003
/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PADDB implementation. */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}
14010
14011
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
14018
14019
14020/* Opcode 0xf2 0x0f 0xfc - invalid */
14021
14022
/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PADDW implementation. */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}
14029
14030
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
14037
14038
14039/* Opcode 0xf2 0x0f 0xfd - invalid */
14040
14041
/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    /* Decode the mnemonic, then dispatch to the common MMX reg/mem worker with the 64-bit PADDD implementation. */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}
14048
14049
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Body emitted inline by the macro; the RT_ARCH_VAL masks select which host architectures have native (non-fallback) helpers. */
    SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
14056
14057
14058/* Opcode 0xf2 0x0f 0xfe - invalid */
14059
14060
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    /* On Intel CPUs UD0 consumes a ModR/M byte (and any effective-address
       bytes it implies) before raising \#UD, so mirror that; other vendors
       raise \#UD on the opcode alone. */
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
14074
14075
14076
14077/**
14078 * Two byte opcode map, first byte 0x0f.
14079 *
14080 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14081 * check if it needs updating as well when making changes.
14082 */
14083const PFNIEMOP g_apfnTwoByteMap[] =
14084{
14085 /* no prefix, 066h prefix f3h prefix, f2h prefix */
14086 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14087 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14088 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14089 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14090 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14091 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14092 /* 0x06 */ IEMOP_X4(iemOp_clts),
14093 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14094 /* 0x08 */ IEMOP_X4(iemOp_invd),
14095 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14096 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14097 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14098 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14099 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14100 /* 0x0e */ IEMOP_X4(iemOp_femms),
14101 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14102
14103 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14104 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14105 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14106 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14107 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14108 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14109 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14110 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14111 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14112 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14113 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14114 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14115 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14116 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14117 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14118 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14119
14120 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14121 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14122 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14123 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14124 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14125 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14126 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14127 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14128 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14130 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14131 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14132 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14133 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14134 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14135 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14136
14137 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14138 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14139 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14140 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14141 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14142 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14143 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14144 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14145 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14146 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14147 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14148 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14149 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14150 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14151 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14152 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14153
14154 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14155 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14156 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14157 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14158 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14159 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14160 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14161 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14162 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14163 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14164 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14165 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14166 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14167 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14168 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14169 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14170
14171 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14173 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14174 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14175 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14176 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14177 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14178 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14179 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14180 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14181 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14182 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14183 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14184 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14185 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14186 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14187
14188 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14197 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14198 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14199 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14200 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14201 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14202 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14203 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14204
14205 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14206 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14207 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14208 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14209 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14210 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14211 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14212 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14213
14214 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14215 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14216 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14217 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14218 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14219 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14220 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14221 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14222
14223 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14224 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14225 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14226 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14227 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14228 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14229 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14230 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14231 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14232 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14233 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14234 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14235 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14236 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14237 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14238 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14239
14240 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14241 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14242 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14243 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14244 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14245 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14246 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14247 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14248 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14249 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14250 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14251 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14252 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14253 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14254 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14255 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14256
14257 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14258 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14259 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14260 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14261 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14262 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14263 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14264 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14265 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14266 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14267 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14268 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14269 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14270 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14271 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14272 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14273
14274 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14275 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14276 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14277 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14278 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14279 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14280 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14281 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14282 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14283 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14284 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14285 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14286 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14287 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14288 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14289 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14290
14291 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14292 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14293 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14294 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14295 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14296 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14297 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14298 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14299 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14300 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14301 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14302 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14303 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14304 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14305 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14306 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14307
14308 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14309 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14315 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324
14325 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14332 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14335 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341
14342 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14343 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14345 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14346 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14347 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14348 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14349 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14350 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14351 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14352 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14353 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14354 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14355 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14356 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14357 /* 0xff */ IEMOP_X4(iemOp_ud0),
14358};
14359AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14360
14361/** @} */
14362
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette