/* $Id: IEMAllInstTwoByte0f.cpp.h 106097 2024-09-19 14:27:50Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

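/*
 * Illustrative sketch (not part of the original file): a typical caller of
 * the worker above is a per-opcode decoder function that merely supplies the
 * 64-bit arithmetic helper as @a pfnU64.  The handler and helper names below
 * are hypothetical placeholders, assumed for illustration only.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pxxx_Pq_Qq) /* hypothetical handler name */
{
    IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq"); /* hypothetical mnemonic */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxxx_u64 /* hypothetical helper */);
}
#endif
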
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)

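/*
 * Illustrative sketch (assumption, not from the original file): an opcode
 * handler expands the body macro above once per encoding, naming the
 * instruction so RT_CONCAT3 can resolve the native emitters.  The handler
 * name, helper expression and architecture masks below are hypothetical.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pxxx_Vx_Wx) /* hypothetical handler name */
{
    IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx"); /* hypothetical mnemonic */
    SSE2_OPT_BODY_FullFull_To_Full(pxxx, iemAImpl_pxxx_u128 /* hypothetical helper */,
                                   RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0 /* hypothetical arch masks */);
}
#endif
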
/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that for SSE may read either the low 64 bits
 * or the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that for SSE2 may read either the low 64 bits
 * or the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

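/*
 * Illustrative sketch (assumption, not from the original file): interleave-low
 * instructions in the punpckl* family are the natural users of the LowLow
 * workers above, while the punpckh* family maps onto the HighHigh workers
 * further down.  The handler and helper spellings here are hypothetical.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx) /* hypothetical handler name */
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Vx,Wx"); /* hypothetical mnemonic */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128 /* hypothetical helper */);
}
#endif
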
/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel docs this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * A body preprocessor variant of iemOpCommonSseFp_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE_FP_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLFPSSEF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCX86XMMREG, pSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(X86XMMREG, uSrc2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc2); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)

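/*
 * Illustrative sketch (assumption, not from the original file): a packed
 * single-precision arithmetic opcode such as addps would expand the body
 * macro above.  Note the IEM_MC_LIVENESS_MXCSR_MODIFY() calls in the native
 * paths, since these workers update MXCSR.  The helper expression and
 * architecture masks below are guesses for illustration only.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_addps_Vps_Wps) /* sketch of a plausible handler shape */
{
    IEMOP_MNEMONIC(addps, "addps Vps,Wps"); /* sketch */
    SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128 /* assumed helper name */,
                                 RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0 /* hypothetical arch masks */);
}
#endif
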
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}

/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}

/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}

/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps      grp6
 * @opcode      /4
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps      grp6
 * @opcode      /5
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}

/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

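/*
 * Aside (illustration, not part of the original file): the ModR/M byte that
 * drives dispatch tables like g_apfnGroup6 packs three fields, and the
 * IEM_GET_MODRM_* accessors used throughout this file reduce to the classic
 * bit slicing shown in this standalone sketch (the real macros additionally
 * fold in REX bits where applicable).
 */
#if 0 /* standalone sketch */
static inline unsigned ModRmMod(uint8_t bRm) { return bRm >> 6; }       /* 3 = register operand, else memory */
static inline unsigned ModRmReg(uint8_t bRm) { return (bRm >> 3) & 7; } /* the /digit selecting the group member */
static inline unsigned ModRmRm(uint8_t bRm)  { return bRm & 7; }        /* register index or addressing form */
#endif
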
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an \#UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}

/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif

/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an \#UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower 3 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}

1670
1671/** Opcode 0x0f 0x01 /7. */
1672FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1673{
1674 IEMOP_MNEMONIC(invlpg, "invlpg");
1675 IEMOP_HLP_MIN_486();
1676 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1677 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1680 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1681 IEM_MC_END();
1682}
1683
1684
1685/** Opcode 0x0f 0x01 0xf8. */
1686FNIEMOP_DEF(iemOp_Grp7_swapgs)
1687{
1688 IEMOP_MNEMONIC(swapgs, "swapgs");
1689 IEMOP_HLP_ONLY_64BIT();
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
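 /* SWAPGS exchanges the current GS base with the IA32_KERNEL_GS_BASE MSR,
    hence the GS segment base in the liveness mask below. */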
1691 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1692}
1693
1694
1695/** Opcode 0x0f 0x01 0xf9. */
1696FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1697{
1698 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
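 /* RDTSCP returns the TSC in EDX:EAX and the IA32_TSC_AUX value in ECX,
    hence all three GPRs in the liveness mask below. */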
1700 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1703 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1704 iemCImpl_rdtscp);
1705}
1706
1707
1708/**
1709 * Group 7 jump table, memory variant.
1710 */
1711IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1712{
1713 iemOp_Grp7_sgdt,
1714 iemOp_Grp7_sidt,
1715 iemOp_Grp7_lgdt,
1716 iemOp_Grp7_lidt,
1717 iemOp_Grp7_smsw,
1718 iemOp_InvalidWithRM,
1719 iemOp_Grp7_lmsw,
1720 iemOp_Grp7_invlpg
1721};
1722
1723
1724/** Opcode 0x0f 0x01. */
1725FNIEMOP_DEF(iemOp_Grp7)
1726{
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if (IEM_IS_MODRM_MEM_MODE(bRm))
1729 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1730
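 /* mod=3: the encoding is extended and the instruction is selected by
    both the reg and r/m fields of the ModR/M byte. */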
1731 switch (IEM_GET_MODRM_REG_8(bRm))
1732 {
1733 case 0:
1734 switch (IEM_GET_MODRM_RM_8(bRm))
1735 {
1736 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1737 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1738 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1739 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1740 }
1741 IEMOP_RAISE_INVALID_OPCODE_RET();
1742
1743 case 1:
1744 switch (IEM_GET_MODRM_RM_8(bRm))
1745 {
1746 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1747 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1748 }
1749 IEMOP_RAISE_INVALID_OPCODE_RET();
1750
1751 case 2:
1752 switch (IEM_GET_MODRM_RM_8(bRm))
1753 {
1754 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1755 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1756 }
1757 IEMOP_RAISE_INVALID_OPCODE_RET();
1758
1759 case 3:
1760 switch (IEM_GET_MODRM_RM_8(bRm))
1761 {
1762 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1763 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1764 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1765 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1766 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1767 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1768 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1769 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1770 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1771 }
1772
1773 case 4:
1774 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1775
1776 case 5:
1777 IEMOP_RAISE_INVALID_OPCODE_RET();
1778
1779 case 6:
1780 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1781
1782 case 7:
1783 switch (IEM_GET_MODRM_RM_8(bRm))
1784 {
1785 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1786 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1787 }
1788 IEMOP_RAISE_INVALID_OPCODE_RET();
1789
1790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1791 }
1792}
1793
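/**
 * Common worker for LAR and LSL on the form:
 *      lar/lsl   Gv, Ew
 *
 * The @a fIsLar argument selects between the two instructions; both defer
 * to iemCImpl_LarLsl_u16 or iemCImpl_LarLsl_u64, with the 32-bit and
 * 64-bit operand sizes sharing the 64-bit code path.
 */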
1794FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1795{
1796 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798
1799 if (IEM_IS_MODRM_REG_MODE(bRm))
1800 {
1801 switch (pVCpu->iem.s.enmEffOpSize)
1802 {
1803 case IEMMODE_16BIT:
1804 IEM_MC_BEGIN(0, 0);
1805 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1807 IEM_MC_ARG(uint16_t, u16Sel, 1);
1808 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1809
1810 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1811 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1812 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1813 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1814
1815 IEM_MC_END();
1816 break;
1817
1818 case IEMMODE_32BIT:
1819 case IEMMODE_64BIT:
1820 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1821 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1822 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1823 IEM_MC_ARG(uint16_t, u16Sel, 1);
1824 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1825
1826 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1827 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1828 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1829 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1830
1831 IEM_MC_END();
1832 break;
1833
1834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1835 }
1836 }
1837 else
1838 {
1839 switch (pVCpu->iem.s.enmEffOpSize)
1840 {
1841 case IEMMODE_16BIT:
1842 IEM_MC_BEGIN(0, 0);
1843 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1844 IEM_MC_ARG(uint16_t, u16Sel, 1);
1845 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1850
1851 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1852 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1853 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1854 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1855
1856 IEM_MC_END();
1857 break;
1858
1859 case IEMMODE_32BIT:
1860 case IEMMODE_64BIT:
1861 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1862 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1863 IEM_MC_ARG(uint16_t, u16Sel, 1);
1864 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866
1867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869/** @todo testcase: make sure it's a 16-bit read. */
1870
1871 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1872 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1873 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1874 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1875
1876 IEM_MC_END();
1877 break;
1878
1879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1880 }
1881 }
1882}
1883
1884
1885
1886/**
1887 * @opcode 0x02
1888 * @opflmodify zf
1889 */
1890FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1891{
1892 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1893 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1894}
1895
1896
1897/**
1898 * @opcode 0x03
1899 * @opflmodify zf
1900 */
1901FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1902{
1903 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1904 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1905}
1906
1907
1908/** Opcode 0x0f 0x05. */
1909FNIEMOP_DEF(iemOp_syscall)
1910{
1911 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1912 {
1913 IEMOP_MNEMONIC(syscall, "syscall");
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1916 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1917 }
1918 else
1919 {
1920 IEMOP_MNEMONIC(loadall286, "loadall286");
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1923 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1924 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1925 }
1926}
1927
1928
1929/** Opcode 0x0f 0x06. */
1930FNIEMOP_DEF(iemOp_clts)
1931{
1932 IEMOP_MNEMONIC(clts, "clts");
1933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1934 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1935}
1936
1937
1938/** Opcode 0x0f 0x07. */
1939FNIEMOP_DEF(iemOp_sysret)
1940{
1941 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1945 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1946}
1947
1948
1949/** Opcode 0x0f 0x08. */
1950FNIEMOP_DEF(iemOp_invd)
1951{
1952 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1953 IEMOP_HLP_MIN_486();
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1956}
1957
1958
1959/** Opcode 0x0f 0x09. */
1960FNIEMOP_DEF(iemOp_wbinvd)
1961{
1962 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1963 IEMOP_HLP_MIN_486();
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1966}
1967
1968
1969/** Opcode 0x0f 0x0b. */
1970FNIEMOP_DEF(iemOp_ud2)
1971{
1972 IEMOP_MNEMONIC(ud2, "ud2");
1973 IEMOP_RAISE_INVALID_OPCODE_RET();
1974}
1975
1976/** Opcode 0x0f 0x0d. */
1977FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1978{
1979 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1981 {
1982 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1983 IEMOP_RAISE_INVALID_OPCODE_RET();
1984 }
1985
1986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1987 if (IEM_IS_MODRM_REG_MODE(bRm))
1988 {
1989 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1990 IEMOP_RAISE_INVALID_OPCODE_RET();
1991 }
1992
1993 switch (IEM_GET_MODRM_REG_8(bRm))
1994 {
1995 case 2: /* Aliased to /0 for the time being. */
1996 case 4: /* Aliased to /0 for the time being. */
1997 case 5: /* Aliased to /0 for the time being. */
1998 case 6: /* Aliased to /0 for the time being. */
1999 case 7: /* Aliased to /0 for the time being. */
2000 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2001 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2002 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2004 }
2005
2006 IEM_MC_BEGIN(0, 0);
2007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 /* Currently a NOP. */
2011 IEM_MC_NOREF(GCPtrEffSrc);
2012 IEM_MC_ADVANCE_RIP_AND_FINISH();
2013 IEM_MC_END();
2014}
2015
2016
2017/** Opcode 0x0f 0x0e. */
2018FNIEMOP_DEF(iemOp_femms)
2019{
2020 IEMOP_MNEMONIC(femms, "femms");
2021
2022 IEM_MC_BEGIN(0, 0);
2023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2027 IEM_MC_FPU_FROM_MMX_MODE();
2028 IEM_MC_ADVANCE_RIP_AND_FINISH();
2029 IEM_MC_END();
2030}
2031
2032
2033/** Opcode 0x0f 0x0f. */
2034FNIEMOP_DEF(iemOp_3Dnow)
2035{
2036 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2037 {
2038 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2039 IEMOP_RAISE_INVALID_OPCODE_RET();
2040 }
2041
2042#ifdef IEM_WITH_3DNOW
2043 /* This is pretty sparse, use switch instead of table. */
2044 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2045 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2046#else
2047 IEMOP_BITCH_ABOUT_STUB();
2048 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2049#endif
2050}
2051
2052
2053/**
2054 * @opcode 0x10
2055 * @oppfx none
2056 * @opcpuid sse
2057 * @opgroup og_sse_simdfp_datamove
2058 * @opxcpttype 4UA
2059 * @optest op1=1 op2=2 -> op1=2
2060 * @optest op1=0 op2=-22 -> op1=-22
2061 */
2062FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2063{
2064 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2066 if (IEM_IS_MODRM_REG_MODE(bRm))
2067 {
2068 /*
2069 * XMM128, XMM128.
2070 */
2071 IEM_MC_BEGIN(0, 0);
2072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2073 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2075 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2076 IEM_GET_MODRM_RM(pVCpu, bRm));
2077 IEM_MC_ADVANCE_RIP_AND_FINISH();
2078 IEM_MC_END();
2079 }
2080 else
2081 {
2082 /*
2083 * XMM128, [mem128].
2084 */
2085 IEM_MC_BEGIN(0, 0);
2086 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2088
2089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2093
2094 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2095 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2096
2097 IEM_MC_ADVANCE_RIP_AND_FINISH();
2098 IEM_MC_END();
2099 }
2101}
2102
2103
2104/**
2105 * @opcode 0x10
2106 * @oppfx 0x66
2107 * @opcpuid sse2
2108 * @opgroup og_sse2_pcksclr_datamove
2109 * @opxcpttype 4UA
2110 * @optest op1=1 op2=2 -> op1=2
2111 * @optest op1=0 op2=-42 -> op1=-42
2112 */
2113FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2114{
2115 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if (IEM_IS_MODRM_REG_MODE(bRm))
2118 {
2119 /*
2120 * XMM128, XMM128.
2121 */
2122 IEM_MC_BEGIN(0, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2127 IEM_GET_MODRM_RM(pVCpu, bRm));
2128 IEM_MC_ADVANCE_RIP_AND_FINISH();
2129 IEM_MC_END();
2130 }
2131 else
2132 {
2133 /*
2134 * XMM128, [mem128].
2135 */
2136 IEM_MC_BEGIN(0, 0);
2137 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2139
2140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2144
2145 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2146 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2147
2148 IEM_MC_ADVANCE_RIP_AND_FINISH();
2149 IEM_MC_END();
2150 }
2151}
2152
2153
2154/**
2155 * @opcode 0x10
2156 * @oppfx 0xf3
2157 * @opcpuid sse
2158 * @opgroup og_sse_simdfp_datamove
2159 * @opxcpttype 5
2160 * @optest op1=1 op2=2 -> op1=2
2161 * @optest op1=0 op2=-22 -> op1=-22
2162 */
2163FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2164{
2165 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2167 if (IEM_IS_MODRM_REG_MODE(bRm))
2168 {
2169 /*
2170 * XMM32, XMM32.
2171 */
2172 IEM_MC_BEGIN(0, 0);
2173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2174 IEM_MC_LOCAL(uint32_t, uSrc);
2175
2176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
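 /* Register form: only the low dword is copied, the upper 96 bits of the
    destination are left untouched (the memory form below zero-extends). */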
2178 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2179 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 else
2185 {
2186 /*
2187 * XMM128, [mem32].
2188 */
2189 IEM_MC_BEGIN(0, 0);
2190 IEM_MC_LOCAL(uint32_t, uSrc);
2191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2192
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2196 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2197
2198 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2199 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2200
2201 IEM_MC_ADVANCE_RIP_AND_FINISH();
2202 IEM_MC_END();
2203 }
2204}
2205
2206
2207/**
2208 * @opcode 0x10
2209 * @oppfx 0xf2
2210 * @opcpuid sse2
2211 * @opgroup og_sse2_pcksclr_datamove
2212 * @opxcpttype 5
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 */
2216FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2217{
2218 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 if (IEM_IS_MODRM_REG_MODE(bRm))
2221 {
2222 /*
2223 * XMM64, XMM64.
2224 */
2225 IEM_MC_BEGIN(0, 0);
2226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2227 IEM_MC_LOCAL(uint64_t, uSrc);
2228
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
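 /* Register form: only the low qword is copied, the high qword of the
    destination is left untouched (the memory form below zero-extends). */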
2231 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2232 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237 else
2238 {
2239 /*
2240 * XMM128, [mem64].
2241 */
2242 IEM_MC_BEGIN(0, 0);
2243 IEM_MC_LOCAL(uint64_t, uSrc);
2244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2245
2246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2248 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2250
2251 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2252 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2253
2254 IEM_MC_ADVANCE_RIP_AND_FINISH();
2255 IEM_MC_END();
2256 }
2257}
2258
2259
2260/**
2261 * @opcode 0x11
2262 * @oppfx none
2263 * @opcpuid sse
2264 * @opgroup og_sse_simdfp_datamove
2265 * @opxcpttype 4UA
2266 * @optest op1=1 op2=2 -> op1=2
2267 * @optest op1=0 op2=-42 -> op1=-42
2268 */
2269FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2270{
2271 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2273 if (IEM_IS_MODRM_REG_MODE(bRm))
2274 {
2275 /*
2276 * XMM128, XMM128.
2277 */
2278 IEM_MC_BEGIN(0, 0);
2279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2282 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2283 IEM_GET_MODRM_REG(pVCpu, bRm));
2284 IEM_MC_ADVANCE_RIP_AND_FINISH();
2285 IEM_MC_END();
2286 }
2287 else
2288 {
2289 /*
2290 * [mem128], XMM128.
2291 */
2292 IEM_MC_BEGIN(0, 0);
2293 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2295
2296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2300
2301 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2303
2304 IEM_MC_ADVANCE_RIP_AND_FINISH();
2305 IEM_MC_END();
2306 }
2307}
2308
2309
2310/**
2311 * @opcode 0x11
2312 * @oppfx 0x66
2313 * @opcpuid sse2
2314 * @opgroup og_sse2_pcksclr_datamove
2315 * @opxcpttype 4UA
2316 * @optest op1=1 op2=2 -> op1=2
2317 * @optest op1=0 op2=-42 -> op1=-42
2318 */
2319FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2320{
2321 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2323 if (IEM_IS_MODRM_REG_MODE(bRm))
2324 {
2325 /*
2326 * XMM128, XMM128.
2327 */
2328 IEM_MC_BEGIN(0, 0);
2329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2333 IEM_GET_MODRM_REG(pVCpu, bRm));
2334 IEM_MC_ADVANCE_RIP_AND_FINISH();
2335 IEM_MC_END();
2336 }
2337 else
2338 {
2339 /*
2340 * [mem128], XMM128.
2341 */
2342 IEM_MC_BEGIN(0, 0);
2343 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2345
2346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2350
2351 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2352 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2353
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 }
2357}
2358
2359
2360/**
2361 * @opcode 0x11
2362 * @oppfx 0xf3
2363 * @opcpuid sse
2364 * @opgroup og_sse_simdfp_datamove
2365 * @opxcpttype 5
2366 * @optest op1=1 op2=2 -> op1=2
2367 * @optest op1=0 op2=-22 -> op1=-22
2368 */
2369FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2370{
2371 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2373 if (IEM_IS_MODRM_REG_MODE(bRm))
2374 {
2375 /*
2376 * XMM32, XMM32.
2377 */
2378 IEM_MC_BEGIN(0, 0);
2379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2380 IEM_MC_LOCAL(uint32_t, uSrc);
2381
2382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2385 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390 else
2391 {
2392 /*
2393 * [mem32], XMM32.
2394 */
2395 IEM_MC_BEGIN(0, 0);
2396 IEM_MC_LOCAL(uint32_t, uSrc);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2403
2404 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2405 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2406
2407 IEM_MC_ADVANCE_RIP_AND_FINISH();
2408 IEM_MC_END();
2409 }
2410}
2411
2412
2413/**
2414 * @opcode 0x11
2415 * @oppfx 0xf2
2416 * @opcpuid sse2
2417 * @opgroup og_sse2_pcksclr_datamove
2418 * @opxcpttype 5
2419 * @optest op1=1 op2=2 -> op1=2
2420 * @optest op1=0 op2=-42 -> op1=-42
2421 */
2422FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2423{
2424 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2426 if (IEM_IS_MODRM_REG_MODE(bRm))
2427 {
2428 /*
2429 * XMM64, XMM64.
2430 */
2431 IEM_MC_BEGIN(0, 0);
2432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2433 IEM_MC_LOCAL(uint64_t, uSrc);
2434
2435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2437 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2438 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443 else
2444 {
2445 /*
2446 * [mem64], XMM64.
2447 */
2448 IEM_MC_BEGIN(0, 0);
2449 IEM_MC_LOCAL(uint64_t, uSrc);
2450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2451
2452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2456
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2459
2460 IEM_MC_ADVANCE_RIP_AND_FINISH();
2461 IEM_MC_END();
2462 }
2463}
2464
2465
2466FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2467{
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 if (IEM_IS_MODRM_REG_MODE(bRm))
2470 {
2471 /**
2472 * @opcode 0x12
2473 * @opcodesub 11 mr/reg
2474 * @oppfx none
2475 * @opcpuid sse
2476 * @opgroup og_sse_simdfp_datamove
2477 * @opxcpttype 5
2478 * @optest op1=1 op2=2 -> op1=2
2479 * @optest op1=0 op2=-42 -> op1=-42
2480 */
2481 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2482
2483 IEM_MC_BEGIN(0, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
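 /* movhlps: the high qword of the source lands in the low qword of the
    destination; the destination's high qword is preserved. */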
2489 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2490 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /**
2498 * @opdone
2499 * @opcode 0x12
2500 * @opcodesub !11 mr/reg
2501 * @oppfx none
2502 * @opcpuid sse
2503 * @opgroup og_sse_simdfp_datamove
2504 * @opxcpttype 5
2505 * @optest op1=1 op2=2 -> op1=2
2506 * @optest op1=0 op2=-42 -> op1=-42
2507 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2508 */
2509 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2510
2511 IEM_MC_BEGIN(0, 0);
2512 IEM_MC_LOCAL(uint64_t, uSrc);
2513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2514
2515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2519
2520 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2521 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2522
2523 IEM_MC_ADVANCE_RIP_AND_FINISH();
2524 IEM_MC_END();
2525 }
2526}
2527
2528
2529/**
2530 * @opcode 0x12
2531 * @opcodesub !11 mr/reg
2532 * @oppfx 0x66
2533 * @opcpuid sse2
2534 * @opgroup og_sse2_pcksclr_datamove
2535 * @opxcpttype 5
2536 * @optest op1=1 op2=2 -> op1=2
2537 * @optest op1=0 op2=-42 -> op1=-42
2538 */
2539FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2540{
2541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2542 if (IEM_IS_MODRM_MEM_MODE(bRm))
2543 {
2544 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2545
2546 IEM_MC_BEGIN(0, 0);
2547 IEM_MC_LOCAL(uint64_t, uSrc);
2548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2549
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2552 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2554
2555 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2556 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2557
2558 IEM_MC_ADVANCE_RIP_AND_FINISH();
2559 IEM_MC_END();
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 else
2573 IEMOP_RAISE_INVALID_OPCODE_RET();
2574}
2575
2576
2577/**
2578 * @opcode 0x12
2579 * @oppfx 0xf3
2580 * @opcpuid sse3
2581 * @opgroup og_sse3_pcksclr_datamove
2582 * @opxcpttype 4
2583 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2584 * op1=0x00000002000000020000000100000001
2585 */
2586FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2587{
2588 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2590 if (IEM_IS_MODRM_REG_MODE(bRm))
2591 {
2592 /*
2593 * XMM, XMM.
2594 */
2595 IEM_MC_BEGIN(0, 0);
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2597 IEM_MC_LOCAL(RTUINT128U, uSrc);
2598
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
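 /* Duplicate the even dwords: dst[0] = dst[1] = src[0]; dst[2] = dst[3] = src[2]. */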
2603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2606 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * XMM, [mem128].
2615 */
2616 IEM_MC_BEGIN(0, 0);
2617 IEM_MC_LOCAL(RTUINT128U, uSrc);
2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2627 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2628 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2629 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2630
2631 IEM_MC_ADVANCE_RIP_AND_FINISH();
2632 IEM_MC_END();
2633 }
2634}
2635
2636
2637/**
2638 * @opcode 0x12
2639 * @oppfx 0xf2
2640 * @opcpuid sse3
2641 * @opgroup og_sse3_pcksclr_datamove
2642 * @opxcpttype 5
2643 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2644 * op1=0x22222222111111112222222211111111
2645 */
2646FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2647{
2648 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2650 if (IEM_IS_MODRM_REG_MODE(bRm))
2651 {
2652 /*
2653 * XMM128, XMM64.
2654 */
2655 IEM_MC_BEGIN(0, 0);
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
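 /* Broadcast the low qword of the source into both qwords of the destination. */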
2663 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2664 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP_AND_FINISH();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * XMM128, [mem64].
2673 */
2674 IEM_MC_BEGIN(0, 0);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_LOCAL(uint64_t, uSrc);
2677
2678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2684 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2685 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP_AND_FINISH();
2688 IEM_MC_END();
2689 }
2690}
2691
2692
2693/**
2694 * @opcode 0x13
2695 * @opcodesub !11 mr/reg
2696 * @oppfx none
2697 * @opcpuid sse
2698 * @opgroup og_sse_simdfp_datamove
2699 * @opxcpttype 5
2700 * @optest op1=1 op2=2 -> op1=2
2701 * @optest op1=0 op2=-42 -> op1=-42
2702 */
2703FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2704{
2705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2706 if (IEM_IS_MODRM_MEM_MODE(bRm))
2707 {
2708 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2709
2710 IEM_MC_BEGIN(0, 0);
2711 IEM_MC_LOCAL(uint64_t, uSrc);
2712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2713
2714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2718
2719 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2720 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2721
2722 IEM_MC_ADVANCE_RIP_AND_FINISH();
2723 IEM_MC_END();
2724 }
2725
2726 /**
2727 * @opdone
2728 * @opmnemonic ud0f13m3
2729 * @opcode 0x13
2730 * @opcodesub 11 mr/reg
2731 * @oppfx none
2732 * @opunused immediate
2733 * @opcpuid sse
2734 * @optest ->
2735 */
2736 else
2737 IEMOP_RAISE_INVALID_OPCODE_RET();
2738}
2739
2740
2741/**
2742 * @opcode 0x13
2743 * @opcodesub !11 mr/reg
2744 * @oppfx 0x66
2745 * @opcpuid sse2
2746 * @opgroup og_sse2_pcksclr_datamove
2747 * @opxcpttype 5
2748 * @optest op1=1 op2=2 -> op1=2
2749 * @optest op1=0 op2=-42 -> op1=-42
2750 */
2751FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2752{
2753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2754 if (IEM_IS_MODRM_MEM_MODE(bRm))
2755 {
2756 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2757
2758 IEM_MC_BEGIN(0, 0);
2759 IEM_MC_LOCAL(uint64_t, uSrc);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2766
2767 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2768 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2769
2770 IEM_MC_ADVANCE_RIP_AND_FINISH();
2771 IEM_MC_END();
2772 }
2773
2774 /**
2775 * @opdone
2776 * @opmnemonic ud660f13m3
2777 * @opcode 0x13
2778 * @opcodesub 11 mr/reg
2779 * @oppfx 0x66
2780 * @opunused immediate
2781 * @opcpuid sse
2782 * @optest ->
2783 */
2784 else
2785 IEMOP_RAISE_INVALID_OPCODE_RET();
2786}
2787
2788
2789/**
2790 * @opmnemonic udf30f13
2791 * @opcode 0x13
2792 * @oppfx 0xf3
2793 * @opunused intel-modrm
2794 * @opcpuid sse
2795 * @optest ->
2796 * @opdone
2797 */
2798
2799/**
2800 * @opmnemonic udf20f13
2801 * @opcode 0x13
2802 * @oppfx 0xf2
2803 * @opunused intel-modrm
2804 * @opcpuid sse
2805 * @optest ->
2806 * @opdone
2807 */
2808
2809/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2810FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2811{
2812 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2813 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2814}
2815
2816
2817/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2818FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2819{
2820 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2821 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2822}
2823
2824
2825/**
2826 * @opdone
2827 * @opmnemonic udf30f14
2828 * @opcode 0x14
2829 * @oppfx 0xf3
2830 * @opunused intel-modrm
2831 * @opcpuid sse
2832 * @optest ->
2833 * @opdone
2834 */
2835
2836/**
2837 * @opmnemonic udf20f14
2838 * @opcode 0x14
2839 * @oppfx 0xf2
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2847FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2848{
2849 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2850 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2851}
2852
2853
2854/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2855FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2859}
2860
2861
2862/* Opcode 0xf3 0x0f 0x15 - invalid */
2863/* Opcode 0xf2 0x0f 0x15 - invalid */
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f15
2868 * @opcode 0x15
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f15
2878 * @opcode 0x15
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
2886FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2887{
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2889 if (IEM_IS_MODRM_REG_MODE(bRm))
2890 {
2891 /**
2892 * @opcode 0x16
2893 * @opcodesub 11 mr/reg
2894 * @oppfx none
2895 * @opcpuid sse
2896 * @opgroup og_sse_simdfp_datamove
2897 * @opxcpttype 5
2898 * @optest op1=1 op2=2 -> op1=2
2899 * @optest op1=0 op2=-42 -> op1=-42
2900 */
2901 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2902
2903 IEM_MC_BEGIN(0, 0);
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2905 IEM_MC_LOCAL(uint64_t, uSrc);
2906
2907 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
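 /* movlhps: the low qword of the source lands in the high qword of the
    destination; the destination's low qword is preserved. */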
2909 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2910 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2911
2912 IEM_MC_ADVANCE_RIP_AND_FINISH();
2913 IEM_MC_END();
2914 }
2915 else
2916 {
2917 /**
2918 * @opdone
2919 * @opcode 0x16
2920 * @opcodesub !11 mr/reg
2921 * @oppfx none
2922 * @opcpuid sse
2923 * @opgroup og_sse_simdfp_datamove
2924 * @opxcpttype 5
2925 * @optest op1=1 op2=2 -> op1=2
2926 * @optest op1=0 op2=-42 -> op1=-42
2927 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2928 */
2929 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2930
2931 IEM_MC_BEGIN(0, 0);
2932 IEM_MC_LOCAL(uint64_t, uSrc);
2933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2934
2935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2939
2940 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2941 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2942
2943 IEM_MC_ADVANCE_RIP_AND_FINISH();
2944 IEM_MC_END();
2945 }
2946}
2947
2948
2949/**
2950 * @opcode 0x16
2951 * @opcodesub !11 mr/reg
2952 * @oppfx 0x66
2953 * @opcpuid sse2
2954 * @opgroup og_sse2_pcksclr_datamove
2955 * @opxcpttype 5
2956 * @optest op1=1 op2=2 -> op1=2
2957 * @optest op1=0 op2=-42 -> op1=-42
2958 */
2959FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2960{
2961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2962 if (IEM_IS_MODRM_MEM_MODE(bRm))
2963 {
2964 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2965
2966 IEM_MC_BEGIN(0, 0);
2967 IEM_MC_LOCAL(uint64_t, uSrc);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2972 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2974
2975 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2976 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2977
2978 IEM_MC_ADVANCE_RIP_AND_FINISH();
2979 IEM_MC_END();
2980 }
2981
2982 /**
2983 * @opdone
2984 * @opmnemonic ud660f16m3
2985 * @opcode 0x16
2986 * @opcodesub 11 mr/reg
2987 * @oppfx 0x66
2988 * @opunused immediate
2989 * @opcpuid sse
2990 * @optest ->
2991 */
2992 else
2993 IEMOP_RAISE_INVALID_OPCODE_RET();
2994}
2995
2996
2997/**
2998 * @opcode 0x16
2999 * @oppfx 0xf3
3000 * @opcpuid sse3
3001 * @opgroup og_sse3_pcksclr_datamove
3002 * @opxcpttype 4
3003 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3004 * op1=0x00000002000000020000000100000001
3005 */
3006FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3007{
3008 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3010 if (IEM_IS_MODRM_REG_MODE(bRm))
3011 {
3012 /*
3013 * XMM128, XMM128.
3014 */
3015 IEM_MC_BEGIN(0, 0);
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3017 IEM_MC_LOCAL(RTUINT128U, uSrc);
3018
3019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3020 IEM_MC_PREPARE_SSE_USAGE();
3021
3022 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
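 /* Duplicate the odd dwords: dst[0] = dst[1] = src[1]; dst[2] = dst[3] = src[3]. */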
3023 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3024 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3025 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3026 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3027
3028 IEM_MC_ADVANCE_RIP_AND_FINISH();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /*
3034 * XMM128, [mem128].
3035 */
3036 IEM_MC_BEGIN(0, 0);
3037 IEM_MC_LOCAL(RTUINT128U, uSrc);
3038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3039
3040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3042 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3046 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3047 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3048 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3049 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3050
3051 IEM_MC_ADVANCE_RIP_AND_FINISH();
3052 IEM_MC_END();
3053 }
3054}
3055
3056/**
3057 * @opdone
3058 * @opmnemonic udf20f16
3059 * @opcode 0x16
3060 * @oppfx 0xf2
3061 * @opunused intel-modrm
3062 * @opcpuid sse
3063 * @optest ->
3064 * @opdone
3065 */
3066
3067
3068/**
3069 * @opcode 0x17
3070 * @opcodesub !11 mr/reg
3071 * @oppfx none
3072 * @opcpuid sse
3073 * @opgroup og_sse_simdfp_datamove
3074 * @opxcpttype 5
3075 * @optest op1=1 op2=2 -> op1=2
3076 * @optest op1=0 op2=-42 -> op1=-42
3077 */
3078FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3079{
3080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3081 if (IEM_IS_MODRM_MEM_MODE(bRm))
3082 {
3083 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3084
3085 IEM_MC_BEGIN(0, 0);
3086 IEM_MC_LOCAL(uint64_t, uSrc);
3087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3088
3089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3093
3094 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3096
3097 IEM_MC_ADVANCE_RIP_AND_FINISH();
3098 IEM_MC_END();
3099 }
3100
3101 /**
3102 * @opdone
3103 * @opmnemonic ud0f17m3
3104 * @opcode 0x17
3105 * @opcodesub 11 mr/reg
3106 * @oppfx none
3107 * @opunused immediate
3108 * @opcpuid sse
3109 * @optest ->
3110 */
3111 else
3112 IEMOP_RAISE_INVALID_OPCODE_RET();
3113}
3114
3115
3116/**
3117 * @opcode 0x17
3118 * @opcodesub !11 mr/reg
3119 * @oppfx 0x66
3120 * @opcpuid sse2
3121 * @opgroup og_sse2_pcksclr_datamove
3122 * @opxcpttype 5
3123 * @optest op1=1 op2=2 -> op1=2
3124 * @optest op1=0 op2=-42 -> op1=-42
3125 */
3126FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if (IEM_IS_MODRM_MEM_MODE(bRm))
3130 {
3131 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3132
3133 IEM_MC_BEGIN(0, 0);
3134 IEM_MC_LOCAL(uint64_t, uSrc);
3135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3136
3137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3139 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3141
3142 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3143 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3144
3145 IEM_MC_ADVANCE_RIP_AND_FINISH();
3146 IEM_MC_END();
3147 }
3148
3149 /**
3150 * @opdone
3151 * @opmnemonic ud660f17m3
3152 * @opcode 0x17
3153 * @opcodesub 11 mr/reg
3154 * @oppfx 0x66
3155 * @opunused immediate
3156 * @opcpuid sse
3157 * @optest ->
3158 */
3159 else
3160 IEMOP_RAISE_INVALID_OPCODE_RET();
3161}
3162
3163
3164/**
3165 * @opdone
3166 * @opmnemonic udf30f17
3167 * @opcode 0x17
3168 * @oppfx 0xf3
3169 * @opunused intel-modrm
3170 * @opcpuid sse
3171 * @optest ->
3172 * @opdone
3173 */
3174
3175/**
3176 * @opmnemonic udf20f17
3177 * @opcode 0x17
3178 * @oppfx 0xf2
3179 * @opunused intel-modrm
3180 * @opcpuid sse
3181 * @optest ->
3182 * @opdone
3183 */
3184
3185
3186/** Opcode 0x0f 0x18. */
3187FNIEMOP_DEF(iemOp_prefetch_Grp16)
3188{
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_MEM_MODE(bRm))
3191 {
3192 switch (IEM_GET_MODRM_REG_8(bRm))
3193 {
3194 case 4: /* Aliased to /0 for the time being according to AMD. */
3195 case 5: /* Aliased to /0 for the time being according to AMD. */
3196 case 6: /* Aliased to /0 for the time being according to AMD. */
3197 case 7: /* Aliased to /0 for the time being according to AMD. */
3198 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3199 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3200 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3201 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3203 }
3204
3205 IEM_MC_BEGIN(0, 0);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3209 /* Currently a NOP. */
3210 IEM_MC_NOREF(GCPtrEffSrc);
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 IEMOP_RAISE_INVALID_OPCODE_RET();
3216}
3217
3218
3219/** Opcode 0x0f 0x19..0x1f. */
3220FNIEMOP_DEF(iemOp_nop_Ev)
3221{
3222 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3224 if (IEM_IS_MODRM_REG_MODE(bRm))
3225 {
3226 IEM_MC_BEGIN(0, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ADVANCE_RIP_AND_FINISH();
3229 IEM_MC_END();
3230 }
3231 else
3232 {
3233 IEM_MC_BEGIN(0, 0);
3234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 /* Currently a NOP. */
3238 IEM_MC_NOREF(GCPtrEffSrc);
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/** Opcode 0x0f 0x20. */
3246FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3247{
3248 /* mod is ignored, as are operand size overrides. */
3249/** @todo testcase: check memory encoding. */
3250 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3251 IEMOP_HLP_MIN_386();
3252 if (IEM_IS_64BIT_CODE(pVCpu))
3253 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3254 else
3255 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3256
3257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3258 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3259 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3260 {
3261 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3262 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3263 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3264 iCrReg |= 8;
3265 }
3266 switch (iCrReg)
3267 {
3268 case 0: case 2: case 3: case 4: case 8:
3269 break;
3270 default:
3271 IEMOP_RAISE_INVALID_OPCODE_RET();
3272 }
3273 IEMOP_HLP_DONE_DECODING();
3274
3275 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3276 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3277 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3278}
3279
3280
3281/** Opcode 0x0f 0x21. */
3282FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3283{
3284/** @todo testcase: check memory encoding. */
3285 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3286 IEMOP_HLP_MIN_386();
3287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3290 IEMOP_RAISE_INVALID_OPCODE_RET();
3291 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3292 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3293 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3294}
3295
3296
3297/** Opcode 0x0f 0x22. */
3298FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3299{
3300 /* mod is ignored, as are operand size overrides. */
3301 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3302 IEMOP_HLP_MIN_386();
3303 if (IEM_IS_64BIT_CODE(pVCpu))
3304 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3305 else
3306 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3307
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3311 {
3312 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3313 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3314 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3315 iCrReg |= 8;
3316 }
3317 switch (iCrReg)
3318 {
3319 case 0: case 2: case 3: case 4: case 8:
3320 break;
3321 default:
3322 IEMOP_RAISE_INVALID_OPCODE_RET();
3323 }
3324 IEMOP_HLP_DONE_DECODING();
3325
3326 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3327 if (iCrReg & (2 | 8))
3328 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3329 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3330 else
3331 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3332 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3333}
3334
3335
3336/** Opcode 0x0f 0x23. */
3337FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3338{
3339 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3340 IEMOP_HLP_MIN_386();
3341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3344 IEMOP_RAISE_INVALID_OPCODE_RET();
3345 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3346 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x24. */
3351FNIEMOP_DEF(iemOp_mov_Rd_Td)
3352{
3353 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3358 IEMOP_RAISE_INVALID_OPCODE_RET();
3359 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3360 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3361}
3362
3363
3364/** Opcode 0x0f 0x26. */
3365FNIEMOP_DEF(iemOp_mov_Td_Rd)
3366{
3367 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3368 IEMOP_HLP_MIN_386();
3369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3371 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3372 IEMOP_RAISE_INVALID_OPCODE_RET();
3373 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3374}
3375
3376
3377/**
3378 * @opcode 0x28
3379 * @oppfx none
3380 * @opcpuid sse
3381 * @opgroup og_sse_simdfp_datamove
3382 * @opxcpttype 1
3383 * @optest op1=1 op2=2 -> op1=2
3384 * @optest op1=0 op2=-42 -> op1=-42
3385 */
3386FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3387{
3388 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3390 if (IEM_IS_MODRM_REG_MODE(bRm))
3391 {
3392 /*
3393 * Register, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3399 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3400 IEM_GET_MODRM_RM(pVCpu, bRm));
3401 IEM_MC_ADVANCE_RIP_AND_FINISH();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /*
3407 * Register, memory.
3408 */
3409 IEM_MC_BEGIN(0, 0);
3410 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3412
3413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417
3418 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3419 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3420
3421 IEM_MC_ADVANCE_RIP_AND_FINISH();
3422 IEM_MC_END();
3423 }
3424}
3425
3426/**
3427 * @opcode 0x28
3428 * @oppfx 66
3429 * @opcpuid sse2
3430 * @opgroup og_sse2_pcksclr_datamove
3431 * @opxcpttype 1
3432 * @optest op1=1 op2=2 -> op1=2
3433 * @optest op1=0 op2=-42 -> op1=-42
3434 */
3435FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3436{
3437 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if (IEM_IS_MODRM_REG_MODE(bRm))
3440 {
3441 /*
3442 * Register, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3449 IEM_GET_MODRM_RM(pVCpu, bRm));
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 /*
3456 * Register, memory.
3457 */
3458 IEM_MC_BEGIN(0, 0);
3459 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3466
3467 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3468 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3469
3470 IEM_MC_ADVANCE_RIP_AND_FINISH();
3471 IEM_MC_END();
3472 }
3473}
3474
3475/* Opcode 0xf3 0x0f 0x28 - invalid */
3476/* Opcode 0xf2 0x0f 0x28 - invalid */
3477
3478/**
3479 * @opcode 0x29
3480 * @oppfx none
3481 * @opcpuid sse
3482 * @opgroup og_sse_simdfp_datamove
3483 * @opxcpttype 1
3484 * @optest op1=1 op2=2 -> op1=2
3485 * @optest op1=0 op2=-42 -> op1=-42
3486 */
3487FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3488{
3489 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3491 if (IEM_IS_MODRM_REG_MODE(bRm))
3492 {
3493 /*
3494 * Register, register.
3495 */
3496 IEM_MC_BEGIN(0, 0);
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3498 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3500 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3501 IEM_GET_MODRM_REG(pVCpu, bRm));
3502 IEM_MC_ADVANCE_RIP_AND_FINISH();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /*
3508 * Memory, register.
3509 */
3510 IEM_MC_BEGIN(0, 0);
3511 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3518
3519 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3520 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3521
3522 IEM_MC_ADVANCE_RIP_AND_FINISH();
3523 IEM_MC_END();
3524 }
3525}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
3536FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3537{
3538 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * Register, register.
3544 */
3545 IEM_MC_BEGIN(0, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3549 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3550 IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * Memory, register.
3558 */
3559 IEM_MC_BEGIN(0, 0);
3560 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3562
3563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3567
3568 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3569 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3570
3571 IEM_MC_ADVANCE_RIP_AND_FINISH();
3572 IEM_MC_END();
3573 }
3574}
3575
3576/* Opcode 0xf3 0x0f 0x29 - invalid */
3577/* Opcode 0xf2 0x0f 0x29 - invalid */
3578
3579
3580/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3581FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3582{
3583 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 if (IEM_IS_MODRM_REG_MODE(bRm))
3586 {
3587 /*
3588 * XMM, MMX
3589 */
3590 IEM_MC_BEGIN(0, 0);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3592 IEM_MC_LOCAL(X86XMMREG, Dst);
3593 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3594 IEM_MC_ARG(uint64_t, u64Src, 1);
3595 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3597 IEM_MC_PREPARE_FPU_USAGE();
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3601 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3602
3603 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3604 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609 else
3610 {
3611 /*
3612 * XMM, [mem64]
3613 */
3614 IEM_MC_BEGIN(0, 0);
3615 IEM_MC_LOCAL(X86XMMREG, Dst);
3616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3617 IEM_MC_ARG(uint64_t, u64Src, 1);
3618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3619
3620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3624 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3625
3626 IEM_MC_PREPARE_FPU_USAGE();
3627 IEM_MC_FPU_TO_MMX_MODE();
3628 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3629 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3630 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635}
3636
3637
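/**
 * For reference, a standalone C model (not used by the code above; MXCSR
 * rounding control and exception reporting are ignored) of what cvtpi2ps
 * computes. It shows why Dst is fetched first: only the low quadword of the
 * destination is written.
 * @code
 *  static void sketchCvtpi2ps(float paDst[4], int32_t const paSrc[2])
 *  {
 *      paDst[0] = (float)paSrc[0];    // rounded per MXCSR.RC on real hardware
 *      paDst[1] = (float)paSrc[1];
 *      // paDst[2] and paDst[3] are left unchanged.
 *  }
 * @endcode
 */
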
3638/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3639FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3640{
3641 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643 if (IEM_IS_MODRM_REG_MODE(bRm))
3644 {
3645 /*
3646 * XMM, MMX
3647 */
3648 IEM_MC_BEGIN(0, 0);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3650 IEM_MC_LOCAL(X86XMMREG, Dst);
3651 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3652 IEM_MC_ARG(uint64_t, u64Src, 1);
3653 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3654 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3655 IEM_MC_PREPARE_FPU_USAGE();
3656 IEM_MC_FPU_TO_MMX_MODE();
3657
3658 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3659
3660 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3661 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3662
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * XMM, [mem64]
3670 */
3671 IEM_MC_BEGIN(0, 0);
3672 IEM_MC_LOCAL(X86XMMREG, Dst);
3673 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3674 IEM_MC_ARG(uint64_t, u64Src, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3679 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3680 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3681 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3682
3683 /* Doesn't cause a transition to MMX mode. */
3684 IEM_MC_PREPARE_SSE_USAGE();
3685
3686 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3688
3689 IEM_MC_ADVANCE_RIP_AND_FINISH();
3690 IEM_MC_END();
3691 }
3692}
3693
3694
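/**
 * For reference: unlike cvtpi2ps, cvtpi2pd writes all 128 bits of the
 * destination, which is why no prior Dst fetch is needed above. The
 * int32 -> double conversion is also always exact (a double's 52-bit
 * mantissa holds any int32), so no precision exception is possible.
 * Sketch, name invented:
 * @code
 *  static void sketchCvtpi2pd(double paDst[2], int32_t const paSrc[2])
 *  {
 *      paDst[0] = paSrc[0];           // exact, no rounding
 *      paDst[1] = paSrc[1];
 *  }
 * @endcode
 */
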
3695/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3696FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3697{
3698 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3699
3700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3702 {
3703 if (IEM_IS_MODRM_REG_MODE(bRm))
3704 {
3705 /* XMM, greg64 */
3706 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3707 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3708 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3709 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3710
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3713 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3714
3715 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3716 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3717 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3718
3719 IEM_MC_ADVANCE_RIP_AND_FINISH();
3720 IEM_MC_END();
3721 }
3722 else
3723 {
3724 /* XMM, [mem64] */
3725 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3727 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3728 IEM_MC_LOCAL(int64_t, i64Src);
3729 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3730 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3731
3732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3734 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3735 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3736
3737 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3738 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3739 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3740
3741 IEM_MC_ADVANCE_RIP_AND_FINISH();
3742 IEM_MC_END();
3743 }
3744 }
3745 else
3746 {
3747 if (IEM_IS_MODRM_REG_MODE(bRm))
3748 {
3749 /* XMM, greg32 */
3750 IEM_MC_BEGIN(0, 0);
3751 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3752 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3753 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3754
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3757 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3758
3759 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3760 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3761 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3762
3763 IEM_MC_ADVANCE_RIP_AND_FINISH();
3764 IEM_MC_END();
3765 }
3766 else
3767 {
3768 /* XMM, [mem32] */
3769 IEM_MC_BEGIN(0, 0);
3770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3771 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3772 IEM_MC_LOCAL(int32_t, i32Src);
3773 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3774 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3775
3776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3779 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3780
3781 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3782 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3783 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3784
3785 IEM_MC_ADVANCE_RIP_AND_FINISH();
3786 IEM_MC_END();
3787 }
3788 }
3789}
3790
3791
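/**
 * For reference: in cvtsi2ss above, REX.W selects the int64 vs int32 source
 * form; either way a single float lands in the low dword of the destination,
 * rounded per MXCSR.RC. Rough model, name invented:
 * @code
 *  static float sketchCvtsi2ss(int64_t i64Src)
 *  {
 *      return (float)i64Src;          // can be inexact: 24-bit mantissa
 *  }
 * @endcode
 */
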
3792/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3793FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3794{
3795 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3796
3797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3798 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3799 {
3800 if (IEM_IS_MODRM_REG_MODE(bRm))
3801 {
3802 /* XMM, greg64 */
3803 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3804 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3805 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3806 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3807
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3809 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3810 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3811
3812 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3813 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3814 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3815
3816 IEM_MC_ADVANCE_RIP_AND_FINISH();
3817 IEM_MC_END();
3818 }
3819 else
3820 {
3821 /* XMM, [mem64] */
3822 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3824 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3825 IEM_MC_LOCAL(int64_t, i64Src);
3826 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3827 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3828
3829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3831 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3832 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3833
3834 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3835 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3836 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3837
3838 IEM_MC_ADVANCE_RIP_AND_FINISH();
3839 IEM_MC_END();
3840 }
3841 }
3842 else
3843 {
3844 if (IEM_IS_MODRM_REG_MODE(bRm))
3845 {
3846 /* XMM, greg32 */
3847 IEM_MC_BEGIN(0, 0);
3848 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3849 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3850 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3851
3852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3853 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3854 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3855
3856 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3857 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3858 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3859
3860 IEM_MC_ADVANCE_RIP_AND_FINISH();
3861 IEM_MC_END();
3862 }
3863 else
3864 {
3865 /* XMM, [mem32] */
3866 IEM_MC_BEGIN(0, 0);
3867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3868 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3869 IEM_MC_LOCAL(int32_t, i32Src);
3870 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3871 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3872
3873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3875 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3876 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3877
3878 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3879 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3880 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3881
3882 IEM_MC_ADVANCE_RIP_AND_FINISH();
3883 IEM_MC_END();
3884 }
3885 }
3886}
3887
3888
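/* Note: with a 32-bit source, cvtsi2sd above is always exact (every int32
   fits in a double), so only its int64 form can round; contrast cvtsi2ss,
   where both forms can raise a precision exception. */
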
3889/**
3890 * @opcode 0x2b
3891 * @opcodesub !11 mr/reg
3892 * @oppfx none
3893 * @opcpuid sse
3894 * @opgroup og_sse1_cachect
3895 * @opxcpttype 1
3896 * @optest op1=1 op2=2 -> op1=2
3897 * @optest op1=0 op2=-42 -> op1=-42
3898 */
3899FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3900{
3901 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3903 if (IEM_IS_MODRM_MEM_MODE(bRm))
3904 {
3905 /*
3906 * memory, register.
3907 */
3908 IEM_MC_BEGIN(0, 0);
3909 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3911
3912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3914 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3916
3917 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3918 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3919
3920 IEM_MC_ADVANCE_RIP_AND_FINISH();
3921 IEM_MC_END();
3922 }
3923 /* The register, register encoding is invalid. */
3924 else
3925 IEMOP_RAISE_INVALID_OPCODE_RET();
3926}
3927
3928/**
3929 * @opcode 0x2b
3930 * @opcodesub !11 mr/reg
3931 * @oppfx 0x66
3932 * @opcpuid sse2
3933 * @opgroup og_sse2_cachect
3934 * @opxcpttype 1
3935 * @optest op1=1 op2=2 -> op1=2
3936 * @optest op1=0 op2=-42 -> op1=-42
3937 */
3938FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3939{
3940 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3942 if (IEM_IS_MODRM_MEM_MODE(bRm))
3943 {
3944 /*
3945 * memory, register.
3946 */
3947 IEM_MC_BEGIN(0, 0);
3948 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3950
3951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3953 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3955
3956 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3957 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3958
3959 IEM_MC_ADVANCE_RIP_AND_FINISH();
3960 IEM_MC_END();
3961 }
3962 /* The register, register encoding is invalid. */
3963 else
3964 IEMOP_RAISE_INVALID_OPCODE_RET();
3965}
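
/* Note on movntps/movntpd above: the non-temporal hint only affects caching;
   architecturally these remain aligned 128-bit stores, so the _ALIGN_SSE
   store raises #GP(0) on a misaligned address and the register encoding
   (mod=11) is undefined, hence the invalid-opcode path. */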
3966/* Opcode 0xf3 0x0f 0x2b - invalid */
3967/* Opcode 0xf2 0x0f 0x2b - invalid */
3968
3969
3970/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3971FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3972{
3973 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3975 if (IEM_IS_MODRM_REG_MODE(bRm))
3976 {
3977 /*
3978 * Register, register.
3979 */
3980 IEM_MC_BEGIN(0, 0);
3981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3982 IEM_MC_LOCAL(uint64_t, u64Dst);
3983 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3984 IEM_MC_ARG(uint64_t, u64Src, 1);
3985 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3986 IEM_MC_PREPARE_FPU_USAGE();
3987 IEM_MC_FPU_TO_MMX_MODE();
3988
3989 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3990
3991 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3992 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3993
3994 IEM_MC_ADVANCE_RIP_AND_FINISH();
3995 IEM_MC_END();
3996 }
3997 else
3998 {
3999 /*
4000 * Register, memory.
4001 */
4002 IEM_MC_BEGIN(0, 0);
4003 IEM_MC_LOCAL(uint64_t, u64Dst);
4004 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4005 IEM_MC_ARG(uint64_t, u64Src, 1);
4006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4007
4008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4010 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4011 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4012
4013 IEM_MC_PREPARE_FPU_USAGE();
4014 IEM_MC_FPU_TO_MMX_MODE();
4015
4016 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
4017 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4018
4019 IEM_MC_ADVANCE_RIP_AND_FINISH();
4020 IEM_MC_END();
4021 }
4022}
4023
4024
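/**
 * For reference, a standalone C model of the truncating float->int32 step
 * used by the cvtt* instructions above and below (INT32_MIN stands in for
 * the integer indefinite; name invented):
 * @code
 *  static int32_t sketchCvttF32ToI32(float r32Src)
 *  {
 *      if (!(r32Src >= -2147483648.0f && r32Src < 2147483648.0f)) // NaN fails too
 *          return INT32_MIN;          // integer indefinite, #I is signalled
 *      return (int32_t)r32Src;        // C casts truncate toward zero
 *  }
 * @endcode
 */
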
4025/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4026FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4027{
4028 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4030 if (IEM_IS_MODRM_REG_MODE(bRm))
4031 {
4032 /*
4033 * Register, register.
4034 */
4035 IEM_MC_BEGIN(0, 0);
4036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4037 IEM_MC_LOCAL(uint64_t, u64Dst);
4038 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4039 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4041 IEM_MC_PREPARE_FPU_USAGE();
4042 IEM_MC_FPU_TO_MMX_MODE();
4043
4044 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4045
4046 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4047 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4048
4049 IEM_MC_ADVANCE_RIP_AND_FINISH();
4050 IEM_MC_END();
4051 }
4052 else
4053 {
4054 /*
4055 * Register, memory.
4056 */
4057 IEM_MC_BEGIN(0, 0);
4058 IEM_MC_LOCAL(uint64_t, u64Dst);
4059 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4060 IEM_MC_LOCAL(X86XMMREG, uSrc);
4061 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4063
4064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4067 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4068
4069 IEM_MC_PREPARE_FPU_USAGE();
4070 IEM_MC_FPU_TO_MMX_MODE();
4071
4072 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4073 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4074
4075 IEM_MC_ADVANCE_RIP_AND_FINISH();
4076 IEM_MC_END();
4077 }
4078}
4079
4080
4081/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4082FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4083{
4084 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4085
4086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4087 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4088 {
4089 if (IEM_IS_MODRM_REG_MODE(bRm))
4090 {
4091 /* greg64, XMM */
4092 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4093 IEM_MC_LOCAL(int64_t, i64Dst);
4094 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4095 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4096
4097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4099 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4100
4101 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4102 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4103 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4104
4105 IEM_MC_ADVANCE_RIP_AND_FINISH();
4106 IEM_MC_END();
4107 }
4108 else
4109 {
4110 /* greg64, [mem32] */
4111 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4113 IEM_MC_LOCAL(int64_t, i64Dst);
4114 IEM_MC_LOCAL(uint32_t, u32Src);
4115 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4116 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4117
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4120 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4121 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4122
4123 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4124 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4125 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4126
4127 IEM_MC_ADVANCE_RIP_AND_FINISH();
4128 IEM_MC_END();
4129 }
4130 }
4131 else
4132 {
4133 if (IEM_IS_MODRM_REG_MODE(bRm))
4134 {
4135 /* greg, XMM */
4136 IEM_MC_BEGIN(0, 0);
4137 IEM_MC_LOCAL(int32_t, i32Dst);
4138 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4139 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4140
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4143 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4144
4145 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4146 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4147 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4148
4149 IEM_MC_ADVANCE_RIP_AND_FINISH();
4150 IEM_MC_END();
4151 }
4152 else
4153 {
4154 /* greg, [mem32] */
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4157 IEM_MC_LOCAL(int32_t, i32Dst);
4158 IEM_MC_LOCAL(uint32_t, u32Src);
4159 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4160 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4161
4162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4165 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4166
4167 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4168 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4169 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4170
4171 IEM_MC_ADVANCE_RIP_AND_FINISH();
4172 IEM_MC_END();
4173 }
4174 }
4175}
4176
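/* Note: in the non-REX.W paths of cvttss2si above the result is committed
   with IEM_MC_STORE_GREG_U32, matching the architectural rule that 32-bit
   GPR writes in 64-bit mode zero-extend into bits 63:32. */
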
4177
4178/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4179FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4180{
4181 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4182
4183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4184 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4185 {
4186 if (IEM_IS_MODRM_REG_MODE(bRm))
4187 {
4188 /* greg64, XMM */
4189 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4190 IEM_MC_LOCAL(int64_t, i64Dst);
4191 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4192 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4193
4194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4196 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4197
4198 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4199 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4200 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4201
4202 IEM_MC_ADVANCE_RIP_AND_FINISH();
4203 IEM_MC_END();
4204 }
4205 else
4206 {
4207 /* greg64, [mem64] */
4208 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4210 IEM_MC_LOCAL(int64_t, i64Dst);
4211 IEM_MC_LOCAL(uint64_t, u64Src);
4212 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4213 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4214
4215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4217 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4218 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4219
4220 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4221 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4222 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4223
4224 IEM_MC_ADVANCE_RIP_AND_FINISH();
4225 IEM_MC_END();
4226 }
4227 }
4228 else
4229 {
4230 if (IEM_IS_MODRM_REG_MODE(bRm))
4231 {
4232 /* greg, XMM */
4233 IEM_MC_BEGIN(0, 0);
4234 IEM_MC_LOCAL(int32_t, i32Dst);
4235 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4236 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4237
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4239 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4240 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4241
4242 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4243 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4244 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4245
4246 IEM_MC_ADVANCE_RIP_AND_FINISH();
4247 IEM_MC_END();
4248 }
4249 else
4250 {
4251 /* greg32, [mem64] */
4252 IEM_MC_BEGIN(0, 0);
4253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4254 IEM_MC_LOCAL(int32_t, i32Dst);
4255 IEM_MC_LOCAL(uint64_t, u64Src);
4256 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4257 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4258
4259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4261 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4262 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4263
4264 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4265 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4266 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4267
4268 IEM_MC_ADVANCE_RIP_AND_FINISH();
4269 IEM_MC_END();
4270 }
4271 }
4272}
4273
4274
4275/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4276FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4277{
4278 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4280 if (IEM_IS_MODRM_REG_MODE(bRm))
4281 {
4282 /*
4283 * Register, register.
4284 */
4285 IEM_MC_BEGIN(0, 0);
4286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4287 IEM_MC_LOCAL(uint64_t, u64Dst);
4288 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4289 IEM_MC_ARG(uint64_t, u64Src, 1);
4290
4291 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4292 IEM_MC_PREPARE_FPU_USAGE();
4293 IEM_MC_FPU_TO_MMX_MODE();
4294
4295 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4296
4297 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4298 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4299
4300 IEM_MC_ADVANCE_RIP_AND_FINISH();
4301 IEM_MC_END();
4302 }
4303 else
4304 {
4305 /*
4306 * Register, memory.
4307 */
4308 IEM_MC_BEGIN(0, 0);
4309 IEM_MC_LOCAL(uint64_t, u64Dst);
4310 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4311 IEM_MC_ARG(uint64_t, u64Src, 1);
4312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4313
4314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4316 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4317 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4318
4319 IEM_MC_PREPARE_FPU_USAGE();
4320 IEM_MC_FPU_TO_MMX_MODE();
4321
4322 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4323 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4324
4325 IEM_MC_ADVANCE_RIP_AND_FINISH();
4326 IEM_MC_END();
4327 }
4328}
4329
4330
4331/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4332FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4333{
4334 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4336 if (IEM_IS_MODRM_REG_MODE(bRm))
4337 {
4338 /*
4339 * Register, register.
4340 */
4341 IEM_MC_BEGIN(0, 0);
4342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4343 IEM_MC_LOCAL(uint64_t, u64Dst);
4344 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4345 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4346
4347 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4348 IEM_MC_PREPARE_FPU_USAGE();
4349 IEM_MC_FPU_TO_MMX_MODE();
4350
4351 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4352
4353 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4354 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4355
4356 IEM_MC_ADVANCE_RIP_AND_FINISH();
4357 IEM_MC_END();
4358 }
4359 else
4360 {
4361 /*
4362 * Register, memory.
4363 */
4364 IEM_MC_BEGIN(0, 0);
4365 IEM_MC_LOCAL(uint64_t, u64Dst);
4366 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4367 IEM_MC_LOCAL(X86XMMREG, uSrc);
4368 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4370
4371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4373 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4374 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4375
4376 IEM_MC_PREPARE_FPU_USAGE();
4377 IEM_MC_FPU_TO_MMX_MODE();
4378
4379 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4380 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4381
4382 IEM_MC_ADVANCE_RIP_AND_FINISH();
4383 IEM_MC_END();
4384 }
4385}
4386
4387
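/**
 * For reference: the 0x2d converts (cvtps2pi and friends) differ from their
 * 0x2c counterparts only in rounding, honouring MXCSR.RC instead of always
 * truncating. Sketch using C99 lrintf, ignoring the out-of-range handling
 * shown in the truncating model above (assumes <math.h>; name invented):
 * @code
 *  static int32_t sketchCvtF32ToI32(float r32Src)
 *  {
 *      return (int32_t)lrintf(r32Src);    // rounds per the current mode
 *  }
 * @endcode
 */
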
4388/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4389FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4390{
4391 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4392
4393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4394 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4395 {
4396 if (IEM_IS_MODRM_REG_MODE(bRm))
4397 {
4398 /* greg64, XMM */
4399 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4400 IEM_MC_LOCAL(int64_t, i64Dst);
4401 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4402 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4403
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4405 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4406 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4407
4408 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4409 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4410 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4411
4412 IEM_MC_ADVANCE_RIP_AND_FINISH();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 /* greg64, [mem32] */
4418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4420 IEM_MC_LOCAL(int64_t, i64Dst);
4421 IEM_MC_LOCAL(uint32_t, u32Src);
4422 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4423 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4424
4425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4427 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4428 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4429
4430 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4431 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4432 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4433
4434 IEM_MC_ADVANCE_RIP_AND_FINISH();
4435 IEM_MC_END();
4436 }
4437 }
4438 else
4439 {
4440 if (IEM_IS_MODRM_REG_MODE(bRm))
4441 {
4442 /* greg, XMM */
4443 IEM_MC_BEGIN(0, 0);
4444 IEM_MC_LOCAL(int32_t, i32Dst);
4445 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4446 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4447
4448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4449 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4450 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4451
4452 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4453 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4454 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4455
4456 IEM_MC_ADVANCE_RIP_AND_FINISH();
4457 IEM_MC_END();
4458 }
4459 else
4460 {
4461 /* greg, [mem32] */
4462 IEM_MC_BEGIN(0, 0);
4463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4464 IEM_MC_LOCAL(int32_t, i32Dst);
4465 IEM_MC_LOCAL(uint32_t, u32Src);
4466 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4467 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4468
4469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4471 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4472 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4473
4474 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4475 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4476 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4477
4478 IEM_MC_ADVANCE_RIP_AND_FINISH();
4479 IEM_MC_END();
4480 }
4481 }
4482}
4483
4484
4485/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4486FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4487{
4488 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4489
4490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4491 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4492 {
4493 if (IEM_IS_MODRM_REG_MODE(bRm))
4494 {
4495 /* greg64, XMM */
4496 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4497 IEM_MC_LOCAL(int64_t, i64Dst);
4498 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4499 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4500
4501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4502 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4503 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4504
4505 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4506 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4507 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4508
4509 IEM_MC_ADVANCE_RIP_AND_FINISH();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 /* greg64, [mem64] */
4515 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4517 IEM_MC_LOCAL(int64_t, i64Dst);
4518 IEM_MC_LOCAL(uint64_t, u64Src);
4519 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4520 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4521
4522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4525 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4526
4527 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4528 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4529 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4530
4531 IEM_MC_ADVANCE_RIP_AND_FINISH();
4532 IEM_MC_END();
4533 }
4534 }
4535 else
4536 {
4537 if (IEM_IS_MODRM_REG_MODE(bRm))
4538 {
4539 /* greg32, XMM */
4540 IEM_MC_BEGIN(0, 0);
4541 IEM_MC_LOCAL(int32_t, i32Dst);
4542 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4543 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4544
4545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4546 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4547 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4548
4549 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4550 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4551 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4552
4553 IEM_MC_ADVANCE_RIP_AND_FINISH();
4554 IEM_MC_END();
4555 }
4556 else
4557 {
4558 /* greg32, [mem64] */
4559 IEM_MC_BEGIN(0, 0);
4560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4561 IEM_MC_LOCAL(int32_t, i32Dst);
4562 IEM_MC_LOCAL(uint64_t, u64Src);
4563 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4564 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4565
4566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4568 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4569 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4570
4571 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4572 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4573 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4574
4575 IEM_MC_ADVANCE_RIP_AND_FINISH();
4576 IEM_MC_END();
4577 }
4578 }
4579}
4580
4581
4582/**
4583 * @opcode 0x2e
4584 * @oppfx none
4585 * @opflmodify cf,pf,af,zf,sf,of
4586 * @opflclear af,sf,of
4587 */
4588FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4589{
4590 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4592 if (IEM_IS_MODRM_REG_MODE(bRm))
4593 {
4594 /*
4595 * Register, register.
4596 */
4597 IEM_MC_BEGIN(0, 0);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4599 IEM_MC_LOCAL(uint32_t, fEFlags);
4600 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4601 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4602 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4603 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4604 IEM_MC_PREPARE_SSE_USAGE();
4605 IEM_MC_FETCH_EFLAGS(fEFlags);
4606 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4607 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4608 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4609 IEM_MC_COMMIT_EFLAGS(fEFlags);
4610
4611 IEM_MC_ADVANCE_RIP_AND_FINISH();
4612 IEM_MC_END();
4613 }
4614 else
4615 {
4616 /*
4617 * Register, memory.
4618 */
4619 IEM_MC_BEGIN(0, 0);
4620 IEM_MC_LOCAL(uint32_t, fEFlags);
4621 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4622 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4623 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4625
4626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4628 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4629 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4630
4631 IEM_MC_PREPARE_SSE_USAGE();
4632 IEM_MC_FETCH_EFLAGS(fEFlags);
4633 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4634 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4635 IEM_MC_COMMIT_EFLAGS(fEFlags);
4636
4637 IEM_MC_ADVANCE_RIP_AND_FINISH();
4638 IEM_MC_END();
4639 }
4640}
4641
4642
4643/**
4644 * @opcode 0x2e
4645 * @oppfx 0x66
4646 * @opflmodify cf,pf,af,zf,sf,of
4647 * @opflclear af,sf,of
4648 */
4649FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4650{
4651 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4653 if (IEM_IS_MODRM_REG_MODE(bRm))
4654 {
4655 /*
4656 * Register, register.
4657 */
4658 IEM_MC_BEGIN(0, 0);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4660 IEM_MC_LOCAL(uint32_t, fEFlags);
4661 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4662 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4663 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4665 IEM_MC_PREPARE_SSE_USAGE();
4666 IEM_MC_FETCH_EFLAGS(fEFlags);
4667 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4668 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4669 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4670 IEM_MC_COMMIT_EFLAGS(fEFlags);
4671
4672 IEM_MC_ADVANCE_RIP_AND_FINISH();
4673 IEM_MC_END();
4674 }
4675 else
4676 {
4677 /*
4678 * Register, memory.
4679 */
4680 IEM_MC_BEGIN(0, 0);
4681 IEM_MC_LOCAL(uint32_t, fEFlags);
4682 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4683 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4684 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4686
4687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4689 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4690 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4691
4692 IEM_MC_PREPARE_SSE_USAGE();
4693 IEM_MC_FETCH_EFLAGS(fEFlags);
4694 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4695 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4696 IEM_MC_COMMIT_EFLAGS(fEFlags);
4697
4698 IEM_MC_ADVANCE_RIP_AND_FINISH();
4699 IEM_MC_END();
4700 }
4701}
4702
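/**
 * For reference, a standalone C model of the EFLAGS result of ucomiss and
 * ucomisd above (assumes <math.h> for isnan; name invented):
 * @code
 *  static uint32_t sketchUcomissEfl(float r32Src1, float r32Src2, uint32_t fEfl)
 *  {
 *      fEfl &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_OF | X86_EFL_AF | X86_EFL_SF);
 *      if (isnan(r32Src1) || isnan(r32Src2))
 *          fEfl |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;   // unordered
 *      else if (r32Src1 == r32Src2)
 *          fEfl |= X86_EFL_ZF;                             // equal
 *      else if (r32Src1 < r32Src2)
 *          fEfl |= X86_EFL_CF;                             // less than
 *      return fEfl;                                        // greater: all three clear
 *  }
 * @endcode
 */
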
4703
4704/* Opcode 0xf3 0x0f 0x2e - invalid */
4705/* Opcode 0xf2 0x0f 0x2e - invalid */
4706
4707
4708/**
4709 * @opcode 0x2f
4710 * @oppfx none
4711 * @opflmodify cf,pf,af,zf,sf,of
4712 * @opflclear af,sf,of
4713 */
4714FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4715{
4716 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4718 if (IEM_IS_MODRM_REG_MODE(bRm))
4719 {
4720 /*
4721 * Register, register.
4722 */
4723 IEM_MC_BEGIN(0, 0);
4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4725 IEM_MC_LOCAL(uint32_t, fEFlags);
4726 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4727 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4728 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4729 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4730 IEM_MC_PREPARE_SSE_USAGE();
4731 IEM_MC_FETCH_EFLAGS(fEFlags);
4732 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4733 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4734 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4735 IEM_MC_COMMIT_EFLAGS(fEFlags);
4736
4737 IEM_MC_ADVANCE_RIP_AND_FINISH();
4738 IEM_MC_END();
4739 }
4740 else
4741 {
4742 /*
4743 * Register, memory.
4744 */
4745 IEM_MC_BEGIN(0, 0);
4746 IEM_MC_LOCAL(uint32_t, fEFlags);
4747 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4748 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4749 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4751
4752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4754 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4755 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4756
4757 IEM_MC_PREPARE_SSE_USAGE();
4758 IEM_MC_FETCH_EFLAGS(fEFlags);
4759 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4760 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4761 IEM_MC_COMMIT_EFLAGS(fEFlags);
4762
4763 IEM_MC_ADVANCE_RIP_AND_FINISH();
4764 IEM_MC_END();
4765 }
4766}
4767
4768
4769/**
4770 * @opcode 0x2f
4771 * @oppfx 0x66
4772 * @opflmodify cf,pf,af,zf,sf,of
4773 * @opflclear af,sf,of
4774 */
4775FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4776{
4777 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4779 if (IEM_IS_MODRM_REG_MODE(bRm))
4780 {
4781 /*
4782 * Register, register.
4783 */
4784 IEM_MC_BEGIN(0, 0);
4785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4786 IEM_MC_LOCAL(uint32_t, fEFlags);
4787 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4788 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4789 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4790 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4791 IEM_MC_PREPARE_SSE_USAGE();
4792 IEM_MC_FETCH_EFLAGS(fEFlags);
4793 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4794 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4795 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4796 IEM_MC_COMMIT_EFLAGS(fEFlags);
4797
4798 IEM_MC_ADVANCE_RIP_AND_FINISH();
4799 IEM_MC_END();
4800 }
4801 else
4802 {
4803 /*
4804 * Register, memory.
4805 */
4806 IEM_MC_BEGIN(0, 0);
4807 IEM_MC_LOCAL(uint32_t, fEFlags);
4808 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4809 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4810 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4812
4813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4815 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4816 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4817
4818 IEM_MC_PREPARE_SSE_USAGE();
4819 IEM_MC_FETCH_EFLAGS(fEFlags);
4820 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4821 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4822 IEM_MC_COMMIT_EFLAGS(fEFlags);
4823
4824 IEM_MC_ADVANCE_RIP_AND_FINISH();
4825 IEM_MC_END();
4826 }
4827}
4828
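/* Note: comiss/comisd above set EFLAGS exactly like ucomiss/ucomisd (see the
   model further up); the only difference is that the ordered compares also
   signal #I on quiet NaN inputs, while the unordered ones do so only for
   signalling NaNs. */
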
4829
4830/* Opcode 0xf3 0x0f 0x2f - invalid */
4831/* Opcode 0xf2 0x0f 0x2f - invalid */
4832
4833/** Opcode 0x0f 0x30. */
4834FNIEMOP_DEF(iemOp_wrmsr)
4835{
4836 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4839}
4840
4841
4842/** Opcode 0x0f 0x31. */
4843FNIEMOP_DEF(iemOp_rdtsc)
4844{
4845 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4847 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4848 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4849 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4850 iemCImpl_rdtsc);
4851}
4852
4853
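/* Note: rdtsc above and rdmsr/rdpmc below each return a 64-bit value split
   as EDX:EAX (high:low), which is why both xAX and xDX are flagged as
   written; in 64-bit mode the upper halves of RAX and RDX are cleared. */
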
4854 /** Opcode 0x0f 0x32. */
4855FNIEMOP_DEF(iemOp_rdmsr)
4856{
4857 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4859 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4860 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4861 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4862 iemCImpl_rdmsr);
4863}
4864
4865
4866 /** Opcode 0x0f 0x33. */
4867FNIEMOP_DEF(iemOp_rdpmc)
4868{
4869 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4872 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4873 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4874 iemCImpl_rdpmc);
4875}
4876
4877
4878/** Opcode 0x0f 0x34. */
4879FNIEMOP_DEF(iemOp_sysenter)
4880{
4881 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4883 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4884 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4885 iemCImpl_sysenter);
4886}
4887
4888/** Opcode 0x0f 0x35. */
4889FNIEMOP_DEF(iemOp_sysexit)
4890{
4891 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4893 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4894 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4895 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4896}
4897
4898/** Opcode 0x0f 0x37. */
4899FNIEMOP_STUB(iemOp_getsec);
4900
4901
4902/** Opcode 0x0f 0x38. */
4903FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4904{
4905#ifdef IEM_WITH_THREE_0F_38
4906 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4907 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4908#else
4909 IEMOP_BITCH_ABOUT_STUB();
4910 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4911#endif
4912}
4913
4914
4915/** Opcode 0x0f 0x3a. */
4916FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4917{
4918#ifdef IEM_WITH_THREE_0F_3A
4919 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4920 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4921#else
4922 IEMOP_BITCH_ABOUT_STUB();
4923 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4924#endif
4925}
4926
4927
4928/**
4929 * Implements a conditional move.
4930 *
4931 * Wish there was an obvious way to do this where we could share and reduce
4932 * code bloat.
4933 *
4934 * @param a_Cnd The conditional "microcode" operation.
4935 */
4936#define CMOV_X(a_Cnd) \
4937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4938 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4939 { \
4940 switch (pVCpu->iem.s.enmEffOpSize) \
4941 { \
4942 case IEMMODE_16BIT: \
4943 IEM_MC_BEGIN(0, 0); \
4944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4945 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4946 a_Cnd { \
4947 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4948 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4949 } IEM_MC_ENDIF(); \
4950 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4951 IEM_MC_END(); \
4952 break; \
4953 \
4954 case IEMMODE_32BIT: \
4955 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4957 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4958 a_Cnd { \
4959 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4960 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4961 } IEM_MC_ELSE() { \
4962 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4963 } IEM_MC_ENDIF(); \
4964 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4965 IEM_MC_END(); \
4966 break; \
4967 \
4968 case IEMMODE_64BIT: \
4969 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4971 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4972 a_Cnd { \
4973 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4974 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4975 } IEM_MC_ENDIF(); \
4976 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4977 IEM_MC_END(); \
4978 break; \
4979 \
4980 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4981 } \
4982 } \
4983 else \
4984 { \
4985 switch (pVCpu->iem.s.enmEffOpSize) \
4986 { \
4987 case IEMMODE_16BIT: \
4988 IEM_MC_BEGIN(0, 0); \
4989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4990 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4993 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4994 a_Cnd { \
4995 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4996 } IEM_MC_ENDIF(); \
4997 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4998 IEM_MC_END(); \
4999 break; \
5000 \
5001 case IEMMODE_32BIT: \
5002 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5004 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5007 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5008 a_Cnd { \
5009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5010 } IEM_MC_ELSE() { \
5011 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5012 } IEM_MC_ENDIF(); \
5013 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5014 IEM_MC_END(); \
5015 break; \
5016 \
5017 case IEMMODE_64BIT: \
5018 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5020 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5023 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5024 a_Cnd { \
5025 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5026 } IEM_MC_ENDIF(); \
5027 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5028 IEM_MC_END(); \
5029 break; \
5030 \
5031 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5032 } \
5033 } do {} while (0)
5034
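/**
 * For reference, two architectural quirks the CMOV_X expansion above models.
 * First, a 32-bit CMOVcc in 64-bit mode zero-extends the destination even
 * when the condition is false -- hence the IEM_MC_ELSE() clearing the high
 * half only in the 32-bit cases. Second, a memory source is always fetched,
 * so a faulting address faults regardless of the condition. Plain C model
 * of the 32-bit case (name invented):
 * @code
 *  static uint64_t sketchCmov32(bool fCond, uint64_t uDstOld, uint32_t u32Src)
 *  {
 *      return fCond ? u32Src : (uint32_t)uDstOld; // high half cleared either way
 *  }
 * @endcode
 */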
5035
5036
5037/**
5038 * @opcode 0x40
5039 * @opfltest of
5040 */
5041FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5042{
5043 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5044 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5045}
5046
5047
5048/**
5049 * @opcode 0x41
5050 * @opfltest of
5051 */
5052FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5053{
5054 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5055 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5056}
5057
5058
5059/**
5060 * @opcode 0x42
5061 * @opfltest cf
5062 */
5063FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5064{
5065 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5066 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5067}
5068
5069
5070/**
5071 * @opcode 0x43
5072 * @opfltest cf
5073 */
5074FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5075{
5076 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5077 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5078}
5079
5080
5081/**
5082 * @opcode 0x44
5083 * @opfltest zf
5084 */
5085FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5086{
5087 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5088 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5089}
5090
5091
5092/**
5093 * @opcode 0x45
5094 * @opfltest zf
5095 */
5096FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5097{
5098 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5099 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5100}
5101
5102
5103/**
5104 * @opcode 0x46
5105 * @opfltest cf,zf
5106 */
5107FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5108{
5109 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5110 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5111}
5112
5113
5114/**
5115 * @opcode 0x47
5116 * @opfltest cf,zf
5117 */
5118FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5119{
5120 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5121 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5122}
5123
5124
5125/**
5126 * @opcode 0x48
5127 * @opfltest sf
5128 */
5129FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5130{
5131 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5132 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5133}
5134
5135
5136/**
5137 * @opcode 0x49
5138 * @opfltest sf
5139 */
5140FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5141{
5142 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5143 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5144}
5145
5146
5147/**
5148 * @opcode 0x4a
5149 * @opfltest pf
5150 */
5151FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5152{
5153 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5154 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5155}
5156
5157
5158/**
5159 * @opcode 0x4b
5160 * @opfltest pf
5161 */
5162FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5163{
5164 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5165 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5166}
5167
5168
5169/**
5170 * @opcode 0x4c
5171 * @opfltest sf,of
5172 */
5173FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5174{
5175 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5176 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5177}
5178
5179
5180/**
5181 * @opcode 0x4d
5182 * @opfltest sf,of
5183 */
5184FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5185{
5186 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5187 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5188}
5189
5190
5191/**
5192 * @opcode 0x4e
5193 * @opfltest zf,sf,of
5194 */
5195FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5196{
5197 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5198 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5199}
5200
5201
5202/**
5203 * @opcode 0x4f
5204 * @opfltest zf,sf,of
5205 */
5206FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5207{
5208 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5209 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5210}
5211
5212#undef CMOV_X
5213
5214/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5215FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5216{
5217 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5219 if (IEM_IS_MODRM_REG_MODE(bRm))
5220 {
5221 /*
5222 * Register, register.
5223 */
5224 IEM_MC_BEGIN(0, 0);
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5226 IEM_MC_LOCAL(uint8_t, u8Dst);
5227 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5228 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5230 IEM_MC_PREPARE_SSE_USAGE();
5231 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5232 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5233 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5234 IEM_MC_ADVANCE_RIP_AND_FINISH();
5235 IEM_MC_END();
5236 }
5237 /* No memory operand. */
5238 else
5239 IEMOP_RAISE_INVALID_OPCODE_RET();
5240}
5241
5242
5243/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5244FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5245{
5246 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5248 if (IEM_IS_MODRM_REG_MODE(bRm))
5249 {
5250 /*
5251 * Register, register.
5252 */
5253 IEM_MC_BEGIN(0, 0);
5254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5255 IEM_MC_LOCAL(uint8_t, u8Dst);
5256 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5257 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5258 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5259 IEM_MC_PREPARE_SSE_USAGE();
5260 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5261 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5262 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5263 IEM_MC_ADVANCE_RIP_AND_FINISH();
5264 IEM_MC_END();
5265 }
5266 /* No memory operand. */
5267 else
5268 IEMOP_RAISE_INVALID_OPCODE_RET();
5270}
5271
5272
5273/* Opcode 0xf3 0x0f 0x50 - invalid */
5274/* Opcode 0xf2 0x0f 0x50 - invalid */
5275
5276
5277/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5278FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5279{
5280 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5281 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5282}
5283
5284
5285/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5286FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5287{
5288 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5289 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5290}
5291
5292
5293/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5294FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5295{
5296 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5297 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5298}
5299
5300
5301/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5302FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5303{
5304 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5305 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5306}
5307
5308
5309/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5310FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5311{
5312 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
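    /* Note: like rcpps below, this returns a hardware approximation
       (relative error at most 1.5*2^-12 per the SDM), not an IEEE-exact
       result. */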
5313 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5314}
5315
5316
5317/* Opcode 0x66 0x0f 0x52 - invalid */
5318
5319
5320/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5321FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5322{
5323 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5324 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5325}
5326
5327
5328/* Opcode 0xf2 0x0f 0x52 - invalid */
5329
5330
5331/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5332FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5333{
5334 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5335 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5336}
5337
5338
5339/* Opcode 0x66 0x0f 0x53 - invalid */
5340
5341
5342/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5343FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5344{
5345 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5346 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5347}
5348
5349
5350/* Opcode 0xf2 0x0f 0x53 - invalid */
5351
5352
5353/** Opcode 0x0f 0x54 - andps Vps, Wps */
5354FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5355{
5356 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5357 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5358}
5359
5360
5361/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5362FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5363{
5364 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5365 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5366}
5367
5368
5369/* Opcode 0xf3 0x0f 0x54 - invalid */
5370/* Opcode 0xf2 0x0f 0x54 - invalid */
5371
5372
5373/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5374FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5375{
5376 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
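    /* Note: it is the destination operand that gets inverted here,
       i.e. dst = ~dst & src, so the operand order matters. */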
5377 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5378}
5379
5380
5381/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5382FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5383{
5384 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5385 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5386}
5387
5388
5389/* Opcode 0xf3 0x0f 0x55 - invalid */
5390/* Opcode 0xf2 0x0f 0x55 - invalid */
5391
5392
5393/** Opcode 0x0f 0x56 - orps Vps, Wps */
5394FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5395{
5396 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5397 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5398}
5399
5400
5401/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5402FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5403{
5404 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5405 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5406}
5407
5408
5409/* Opcode 0xf3 0x0f 0x56 - invalid */
5410/* Opcode 0xf2 0x0f 0x56 - invalid */
5411
5412
5413/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5414FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5415{
5416 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5417 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5418}
5419
5420
5421/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5422FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5423{
5424 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5425 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5426}
5427
5428
5429/* Opcode 0xf3 0x0f 0x57 - invalid */
5430/* Opcode 0xf2 0x0f 0x57 - invalid */
5431
5432/** Opcode 0x0f 0x58 - addps Vps, Wps */
5433FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5434{
5435 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5436 SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5437}
5438
5439
5440/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5441FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5442{
5443 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5444 SSE_FP_BODY_FullFull_To_Full(addpd, iemAImpl_addpd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5445}
5446
5447
5448/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5449FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5450{
5451 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5452 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5453}
5454
5455
5456/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5457FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5458{
5459 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5460 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5461}
5462
5463
5464/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5465FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5466{
5467 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5468 SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5469}
5470
5471
5472/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5473FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5474{
5475 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5476 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5477}
5478
5479
5480/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5481FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5482{
5483 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5484 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5485}
5486
5487
5488/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5489FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5490{
5491 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5492 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5493}
5494
5495
5496/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5497FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5498{
5499 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
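    /* Note: this widens the two single-precision values in the low qword
       of the source to two doubles filling the whole destination. */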
5500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5501 if (IEM_IS_MODRM_REG_MODE(bRm))
5502 {
5503 /*
5504 * XMM, XMM[63:0].
5505 */
5506 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5509 IEM_MC_PREPARE_SSE_USAGE();
5510
5511 IEM_MC_LOCAL(X86XMMREG, SseRes);
5512 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5513 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
5514 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
5515 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5516 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5517
5518 IEM_MC_ADVANCE_RIP_AND_FINISH();
5519 IEM_MC_END();
5520 }
5521 else
5522 {
5523 /*
5524 * XMM, [mem64].
5525 */
5526 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5530 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5531
5532 IEM_MC_LOCAL(uint64_t, u64Src);
5533 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
5534 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5535
5536 IEM_MC_PREPARE_SSE_USAGE();
5537 IEM_MC_LOCAL(X86XMMREG, SseRes);
5538 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5539 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5540 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5541
5542 IEM_MC_ADVANCE_RIP_AND_FINISH();
5543 IEM_MC_END();
5544 }
5545}
5546
5547
5548/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5549FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5550{
5551 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
5552 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5553 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5554}
5555
5556
5557/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5558FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5559{
5560 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5562}
5563
5564
5565/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5566FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5567{
5568 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5570}
5571
5572
5573/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5574FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5575{
5576 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5577 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5578 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5579}
5580
5581
5582/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5583FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5584{
5585 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5586 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5587 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5588}
5589
5590
5591/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5592FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5593{
5594 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5595 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5596 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5597}
5598
5599
5600/* Opcode 0xf2 0x0f 0x5b - invalid */
5601
5602
5603/** Opcode 0x0f 0x5c - subps Vps, Wps */
5604FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5605{
5606 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5607 SSE_FP_BODY_FullFull_To_Full(subps, iemAImpl_subps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5608}
5609
5610
5611/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5612FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5613{
5614 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5615 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5616}
5617
5618
5619/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5620FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5621{
5622 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5623 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5624}
5625
5626
5627/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5628FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5629{
5630 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5631 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5632}
5633
5634
5635/** Opcode 0x0f 0x5d - minps Vps, Wps */
5636FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5637{
5638 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5639 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5640}
5641
5642
5643/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5644FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5645{
5646 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5647 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5648}
5649
5650
5651/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5652FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5653{
5654 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5655 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5656}
5657
5658
5659/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5660FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5661{
5662 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5663 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5664}
5665
5666
5667/** Opcode 0x0f 0x5e - divps Vps, Wps */
5668FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5669{
5670 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5671 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5672}
5673
5674
5675/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5676FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5677{
5678 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5679 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5680}
5681
5682
5683/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5684FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5685{
5686 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5687 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5688}
5689
5690
5691/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5692FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5693{
5694 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5695 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5696}
5697
5698
5699/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5700FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5701{
5702 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5703 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5704}
5705
5706
5707/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5708FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5709{
5710 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5711 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5712}
5713
5714
5715/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5716FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5717{
5718 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5719 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5720}
5721
5722
5723/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5724FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5725{
5726 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5727 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5728}
5729
5730
5731/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5732FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5733{
5734 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5735 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5736}
5737
5738
5739/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5740FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5741{
5742 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5743 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5744}
5745
5746
5747/* Opcode 0xf3 0x0f 0x60 - invalid */
5748
5749
5750/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5751FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5752{
5753 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5754 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5755 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5756}
5757
5758
5759/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5760FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5761{
5762 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5763 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5764}
5765
5766
5767/* Opcode 0xf3 0x0f 0x61 - invalid */
5768
5769
5770/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5771FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5772{
5773 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5774 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5775}
5776
5777
5778/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5779FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5780{
5781 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5782 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5783}
5784
5785
5786/* Opcode 0xf3 0x0f 0x62 - invalid */
5787
5788
5789
5790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5791FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5792{
5793 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
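    /* Note: narrows the signed words of the destination and source to
       bytes using signed saturation, packing both into the destination. */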
5794 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5795}
5796
5797
5798/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5799FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5800{
5801 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5802 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5803}
5804
5805
5806/* Opcode 0xf3 0x0f 0x63 - invalid */
5807
5808
5809/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5810FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5811{
5812 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5813 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5814}
5815
5816
5817/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5818FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5819{
5820 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5821 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5822}
5823
5824
5825/* Opcode 0xf3 0x0f 0x64 - invalid */
5826
5827
5828/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5829FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5830{
5831 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5832 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5833}
5834
5835
5836/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5837FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5838{
5839 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5840 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5841}
5842
5843
5844/* Opcode 0xf3 0x0f 0x65 - invalid */
5845
5846
5847/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5848FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5849{
5850 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5851 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5852}
5853
5854
5855/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5856FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5857{
5858 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5859 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5860}
5861
5862
5863/* Opcode 0xf3 0x0f 0x66 - invalid */
5864
5865
5866/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5867FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5868{
5869 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5870 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5871}
5872
5873
5874/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5875FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5876{
5877 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5878 SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5879}
5880
5881
5882/* Opcode 0xf3 0x0f 0x67 - invalid */
5883
5884
5885/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5886 * @note Intel and AMD both use Qd for the second parameter, however they
5887 * both list it as an mmX/mem64 operand and Intel describes it as being
5888 * loaded as a qword, so it should be Qq, shouldn't it? */
5889FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5890{
5891 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5892 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5893}
5894
5895
5896/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5897FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5898{
5899 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5900 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5901}
5902
5903
5904/* Opcode 0xf3 0x0f 0x68 - invalid */
5905
5906
5907/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5908 * @note Intel and AMD both use Qd for the second parameter, however they
5909 * both list it as an mmX/mem64 operand and Intel describes it as being
5910 * loaded as a qword, so it should be Qq, shouldn't it? */
5911FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5912{
5913 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5914 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5915}
5916
5917
5918/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5919FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5920{
5921 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5922 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5924}
5925
5926
5927/* Opcode 0xf3 0x0f 0x69 - invalid */
5928
5929
5930/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5931 * @note Intel and AMD both use Qd for the second parameter, however they
5932 * both list it as an mmX/mem64 operand and Intel describes it as being
5933 * loaded as a qword, so it should be Qq, shouldn't it? */
5934FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5935{
5936 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5937 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5938}
5939
5940
5941/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5942FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5943{
5944 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5945 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5946}
5947
5948
5949/* Opcode 0xf3 0x0f 0x6a - invalid */
5950
5951
5952/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5953FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5954{
5955 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5956 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5957}
5958
5959
5960/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5961FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5962{
5963 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5964 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5965}
5966
5967
5968/* Opcode 0xf3 0x0f 0x6b - invalid */
5969
5970
5971/* Opcode 0x0f 0x6c - invalid */
5972
5973
5974/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5975FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5976{
5977 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5978 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5979}
5980
5981
5982/* Opcode 0xf3 0x0f 0x6c - invalid */
5983/* Opcode 0xf2 0x0f 0x6c - invalid */
5984
5985
5986/* Opcode 0x0f 0x6d - invalid */
5987
5988
5989/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5990FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5991{
5992 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5993 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5994}
5995
5996
5997/* Opcode 0xf3 0x0f 0x6d - invalid */
5998
5999
6000FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6001{
6002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
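    /* REX.W selects between the 64-bit movq (rex.w=1) and the
       zero-extending 32-bit movd (rex.w=0) forms documented below. */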
6003 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6004 {
6005 /**
6006 * @opcode 0x6e
6007 * @opcodesub rex.w=1
6008 * @oppfx none
6009 * @opcpuid mmx
6010 * @opgroup og_mmx_datamove
6011 * @opxcpttype 5
6012 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6013 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6014 */
6015 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6016 if (IEM_IS_MODRM_REG_MODE(bRm))
6017 {
6018 /* MMX, greg64 */
6019 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6021 IEM_MC_LOCAL(uint64_t, u64Tmp);
6022
6023 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6024 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6025 IEM_MC_FPU_TO_MMX_MODE();
6026
6027 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6028 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6029
6030 IEM_MC_ADVANCE_RIP_AND_FINISH();
6031 IEM_MC_END();
6032 }
6033 else
6034 {
6035 /* MMX, [mem64] */
6036 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6038 IEM_MC_LOCAL(uint64_t, u64Tmp);
6039
6040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6042 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6043 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6044
6045 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6046 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6047 IEM_MC_FPU_TO_MMX_MODE();
6048
6049 IEM_MC_ADVANCE_RIP_AND_FINISH();
6050 IEM_MC_END();
6051 }
6052 }
6053 else
6054 {
6055 /**
6056 * @opdone
6057 * @opcode 0x6e
6058 * @opcodesub rex.w=0
6059 * @oppfx none
6060 * @opcpuid mmx
6061 * @opgroup og_mmx_datamove
6062 * @opxcpttype 5
6063 * @opfunction iemOp_movd_q_Pd_Ey
6064 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6065 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6066 */
6067 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6068 if (IEM_IS_MODRM_REG_MODE(bRm))
6069 {
6070 /* MMX, greg32 */
6071 IEM_MC_BEGIN(0, 0);
6072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6073 IEM_MC_LOCAL(uint32_t, u32Tmp);
6074
6075 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6076 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6077 IEM_MC_FPU_TO_MMX_MODE();
6078
6079 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6080 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6081
6082 IEM_MC_ADVANCE_RIP_AND_FINISH();
6083 IEM_MC_END();
6084 }
6085 else
6086 {
6087 /* MMX, [mem32] */
6088 IEM_MC_BEGIN(0, 0);
6089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6090 IEM_MC_LOCAL(uint32_t, u32Tmp);
6091
6092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6094 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6095 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6096
6097 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6098 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6099 IEM_MC_FPU_TO_MMX_MODE();
6100
6101 IEM_MC_ADVANCE_RIP_AND_FINISH();
6102 IEM_MC_END();
6103 }
6104 }
6105}
6106
6107FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6108{
6109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6110 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6111 {
6112 /**
6113 * @opcode 0x6e
6114 * @opcodesub rex.w=1
6115 * @oppfx 0x66
6116 * @opcpuid sse2
6117 * @opgroup og_sse2_simdint_datamove
6118 * @opxcpttype 5
6119 * @optest 64-bit / op1=1 op2=2 -> op1=2
6120 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6121 */
6122 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6123 if (IEM_IS_MODRM_REG_MODE(bRm))
6124 {
6125 /* XMM, greg64 */
6126 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6128 IEM_MC_LOCAL(uint64_t, u64Tmp);
6129
6130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6132
6133 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6134 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6135
6136 IEM_MC_ADVANCE_RIP_AND_FINISH();
6137 IEM_MC_END();
6138 }
6139 else
6140 {
6141 /* XMM, [mem64] */
6142 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6144 IEM_MC_LOCAL(uint64_t, u64Tmp);
6145
6146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6148 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6149 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6150
6151 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6152 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6153
6154 IEM_MC_ADVANCE_RIP_AND_FINISH();
6155 IEM_MC_END();
6156 }
6157 }
6158 else
6159 {
6160 /**
6161 * @opdone
6162 * @opcode 0x6e
6163 * @opcodesub rex.w=0
6164 * @oppfx 0x66
6165 * @opcpuid sse2
6166 * @opgroup og_sse2_simdint_datamove
6167 * @opxcpttype 5
6168 * @opfunction iemOp_movd_q_Vy_Ey
6169 * @optest op1=1 op2=2 -> op1=2
6170 * @optest op1=0 op2=-42 -> op1=-42
6171 */
6172 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6173 if (IEM_IS_MODRM_REG_MODE(bRm))
6174 {
6175 /* XMM, greg32 */
6176 IEM_MC_BEGIN(0, 0);
6177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6178 IEM_MC_LOCAL(uint32_t, u32Tmp);
6179
6180 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6181 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6182
6183 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6184 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6185
6186 IEM_MC_ADVANCE_RIP_AND_FINISH();
6187 IEM_MC_END();
6188 }
6189 else
6190 {
6191 /* XMM, [mem32] */
6192 IEM_MC_BEGIN(0, 0);
6193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6194 IEM_MC_LOCAL(uint32_t, u32Tmp);
6195
6196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6198 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6199 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6200
6201 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6202 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6203
6204 IEM_MC_ADVANCE_RIP_AND_FINISH();
6205 IEM_MC_END();
6206 }
6207 }
6208}
6209
6210/* Opcode 0xf3 0x0f 0x6e - invalid */
6211
6212
6213/**
6214 * @opcode 0x6f
6215 * @oppfx none
6216 * @opcpuid mmx
6217 * @opgroup og_mmx_datamove
6218 * @opxcpttype 5
6219 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6220 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6221 */
6222FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6223{
6224 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6226 if (IEM_IS_MODRM_REG_MODE(bRm))
6227 {
6228 /*
6229 * Register, register.
6230 */
6231 IEM_MC_BEGIN(0, 0);
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6233 IEM_MC_LOCAL(uint64_t, u64Tmp);
6234
6235 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6236 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6237 IEM_MC_FPU_TO_MMX_MODE();
6238
6239 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6240 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6241
6242 IEM_MC_ADVANCE_RIP_AND_FINISH();
6243 IEM_MC_END();
6244 }
6245 else
6246 {
6247 /*
6248 * Register, memory.
6249 */
6250 IEM_MC_BEGIN(0, 0);
6251 IEM_MC_LOCAL(uint64_t, u64Tmp);
6252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6253
6254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6256 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6257 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6258
6259 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6260 IEM_MC_FPU_TO_MMX_MODE();
6261
6262 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6263
6264 IEM_MC_ADVANCE_RIP_AND_FINISH();
6265 IEM_MC_END();
6266 }
6267}
6268
6269/**
6270 * @opcode 0x6f
6271 * @oppfx 0x66
6272 * @opcpuid sse2
6273 * @opgroup og_sse2_simdint_datamove
6274 * @opxcpttype 1
6275 * @optest op1=1 op2=2 -> op1=2
6276 * @optest op1=0 op2=-42 -> op1=-42
6277 */
6278FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6279{
6280 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6282 if (IEM_IS_MODRM_REG_MODE(bRm))
6283 {
6284 /*
6285 * Register, register.
6286 */
6287 IEM_MC_BEGIN(0, 0);
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6289
6290 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6292
6293 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6294 IEM_GET_MODRM_RM(pVCpu, bRm));
6295 IEM_MC_ADVANCE_RIP_AND_FINISH();
6296 IEM_MC_END();
6297 }
6298 else
6299 {
6300 /*
6301 * Register, memory.
6302 */
6303 IEM_MC_BEGIN(0, 0);
6304 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6306
6307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6310 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6311
6312 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6313 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6314
6315 IEM_MC_ADVANCE_RIP_AND_FINISH();
6316 IEM_MC_END();
6317 }
6318}
6319
6320/**
6321 * @opcode 0x6f
6322 * @oppfx 0xf3
6323 * @opcpuid sse2
6324 * @opgroup og_sse2_simdint_datamove
6325 * @opxcpttype 4UA
6326 * @optest op1=1 op2=2 -> op1=2
6327 * @optest op1=0 op2=-42 -> op1=-42
6328 */
6329FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6330{
6331 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6333 if (IEM_IS_MODRM_REG_MODE(bRm))
6334 {
6335 /*
6336 * Register, register.
6337 */
6338 IEM_MC_BEGIN(0, 0);
6339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6342 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6343 IEM_GET_MODRM_RM(pVCpu, bRm));
6344 IEM_MC_ADVANCE_RIP_AND_FINISH();
6345 IEM_MC_END();
6346 }
6347 else
6348 {
6349 /*
6350 * Register, memory.
6351 */
6352 IEM_MC_BEGIN(0, 0);
6353 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6355
6356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6358 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6359 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
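        /* Unlike the movdqa variant above, the unaligned (NO_AC) fetch
           skips the 16-byte SSE alignment check, so misaligned sources
           do not raise #GP here. */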
6360 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6361 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6362
6363 IEM_MC_ADVANCE_RIP_AND_FINISH();
6364 IEM_MC_END();
6365 }
6366}
6367
6368
6369/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6370FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6371{
6372 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6374 if (IEM_IS_MODRM_REG_MODE(bRm))
6375 {
6376 /*
6377 * Register, register.
6378 */
6379 IEM_MC_BEGIN(0, 0);
6380 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6382 IEM_MC_ARG(uint64_t *, pDst, 0);
6383 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6384 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6386 IEM_MC_PREPARE_FPU_USAGE();
6387 IEM_MC_FPU_TO_MMX_MODE();
6388
6389 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6390 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6391 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6392 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6393
6394 IEM_MC_ADVANCE_RIP_AND_FINISH();
6395 IEM_MC_END();
6396 }
6397 else
6398 {
6399 /*
6400 * Register, memory.
6401 */
6402 IEM_MC_BEGIN(0, 0);
6403 IEM_MC_ARG(uint64_t *, pDst, 0);
6404 IEM_MC_LOCAL(uint64_t, uSrc);
6405 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6407
6408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6409 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6410 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6412 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6413 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6414
6415 IEM_MC_PREPARE_FPU_USAGE();
6416 IEM_MC_FPU_TO_MMX_MODE();
6417
6418 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6419 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6420 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6421
6422 IEM_MC_ADVANCE_RIP_AND_FINISH();
6423 IEM_MC_END();
6424 }
6425}
6426
6427
6428/**
6429 * Common worker for SSE2 instructions on the forms:
6430 * pshufd xmm1, xmm2/mem128, imm8
6431 * pshufhw xmm1, xmm2/mem128, imm8
6432 * pshuflw xmm1, xmm2/mem128, imm8
6433 *
6434 * Proper alignment of the 128-bit operand is enforced.
6435 * Exceptions type 4. SSE2 cpuid checks.
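 *
 * The imm8 holds four 2-bit source element selectors; e.g. pshufd with
 * imm8=0x1b (0b00011011) reverses the order of the four source dwords.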
6436 */
6437FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6438{
6439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6440 if (IEM_IS_MODRM_REG_MODE(bRm))
6441 {
6442 /*
6443 * Register, register.
6444 */
6445 IEM_MC_BEGIN(0, 0);
6446 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6448 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6449 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6450 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6452 IEM_MC_PREPARE_SSE_USAGE();
6453 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6454 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6455 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6456 IEM_MC_ADVANCE_RIP_AND_FINISH();
6457 IEM_MC_END();
6458 }
6459 else
6460 {
6461 /*
6462 * Register, memory.
6463 */
6464 IEM_MC_BEGIN(0, 0);
6465 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6466 IEM_MC_LOCAL(RTUINT128U, uSrc);
6467 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6469
6470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6471 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6472 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6474 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6475
6476 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6477 IEM_MC_PREPARE_SSE_USAGE();
6478 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6479 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6480
6481 IEM_MC_ADVANCE_RIP_AND_FINISH();
6482 IEM_MC_END();
6483 }
6484}
6485
6486
6487/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6488FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6489{
6490 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6491 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6492}
6493
6494
6495/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6496FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6497{
6498 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6499 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6500}
6501
6502
6503/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6504FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6505{
6506 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6507 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6508}
6509
6510
6511/**
6512 * Common worker for MMX instructions of the form:
6513 * psrlw mm, imm8
6514 * psraw mm, imm8
6515 * psllw mm, imm8
6516 * psrld mm, imm8
6517 * psrad mm, imm8
6518 * pslld mm, imm8
6519 * psrlq mm, imm8
6520 * psllq mm, imm8
6521 *
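 * Shift counts of element width or more zero the destination for the
 * logical shifts, while psraw/psrad fill each element with its sign bit.
 *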
6522 */
6523FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6524{
6525 if (IEM_IS_MODRM_REG_MODE(bRm))
6526 {
6527 /*
6528 * Register, immediate.
6529 */
6530 IEM_MC_BEGIN(0, 0);
6531 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6533 IEM_MC_ARG(uint64_t *, pDst, 0);
6534 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6535 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6536 IEM_MC_PREPARE_FPU_USAGE();
6537 IEM_MC_FPU_TO_MMX_MODE();
6538
6539 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6540 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6541 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6542
6543 IEM_MC_ADVANCE_RIP_AND_FINISH();
6544 IEM_MC_END();
6545 }
6546 else
6547 {
6548 /*
6549 * Register, memory not supported.
6550 */
6551 /// @todo Caller already enforced register mode?!
6552 AssertFailedReturn(VINF_SUCCESS);
6553 }
6554}
6555
6556
6557#if 0 /*unused*/
6558/**
6559 * Common worker for SSE2 instructions of the form:
6560 * psrlw xmm, imm8
6561 * psraw xmm, imm8
6562 * psllw xmm, imm8
6563 * psrld xmm, imm8
6564 * psrad xmm, imm8
6565 * pslld xmm, imm8
6566 * psrlq xmm, imm8
6567 * psllq xmm, imm8
6568 *
6569 */
6570FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6571{
6572 if (IEM_IS_MODRM_REG_MODE(bRm))
6573 {
6574 /*
6575 * Register, immediate.
6576 */
6577 IEM_MC_BEGIN(0, 0);
6578 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6580 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6581 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6582 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6583 IEM_MC_PREPARE_SSE_USAGE();
6584 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6585 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6586 IEM_MC_ADVANCE_RIP_AND_FINISH();
6587 IEM_MC_END();
6588 }
6589 else
6590 {
6591 /*
6592 * Register, memory.
6593 */
6594 /// @todo Caller already enforced register mode?!
6595 AssertFailedReturn(VINF_SUCCESS);
6596 }
6597}
6598#endif
6599
6600
6601/**
6602 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
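 *
 * Unlike the (unused) worker function above, this lets IEM_MC_NATIVE_IF
 * select a native recompiler emitter (iemNativeEmit_<ins>_ri_u128) when
 * the host architecture is in a_fRegNativeArchs, falling back to the
 * C helper otherwise.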
6603 */
6604#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6605 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6606 { \
6607 /* \
6608 * Register, immediate. \
6609 */ \
6610 IEM_MC_BEGIN(0, 0); \
6611 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6614 IEM_MC_PREPARE_SSE_USAGE(); \
6615 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6616 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6617 } IEM_MC_NATIVE_ELSE() { \
6618 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6619 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6620 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6621 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6622 } IEM_MC_NATIVE_ENDIF(); \
6623 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6624 IEM_MC_END(); \
6625 } \
6626 else \
6627 { \
6628 /* \
6629 * Register, memory. \
6630 */ \
6631 AssertFailedReturn(VINF_SUCCESS); \
6632 } (void)0
6633
6634
6635/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6636FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6637{
6638// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6639 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6640}
6641
6642
6643/** Opcode 0x66 0x0f 0x71 11/2. */
6644FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6645{
6646// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6647 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6648}
6649
6650
6651/** Opcode 0x0f 0x71 11/4. */
6652FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6653{
6654// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6655 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6656}
6657
6658
6659/** Opcode 0x66 0x0f 0x71 11/4. */
6660FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6661{
6662// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6663 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6664}
6665
6666
6667/** Opcode 0x0f 0x71 11/6. */
6668FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6669{
6670// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6671 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6672}
6673
6674
6675/** Opcode 0x66 0x0f 0x71 11/6. */
6676FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6677{
6678// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6679 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6680}
6681
6682
6683/**
6684 * Group 12 jump table for register variant.
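 *
 * Indexed by /reg * 4 + idxPrefix, i.e. four entries per ModR/M reg
 * value, one for each mandatory prefix (none, 0x66, 0xf3, 0xf2).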
6685 */
6686IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6687{
6688 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6689 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6691 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6692 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6693 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6694 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6695 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6696};
6697AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6698
6699
6700/** Opcode 0x0f 0x71. */
6701FNIEMOP_DEF(iemOp_Grp12)
6702{
6703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6704 if (IEM_IS_MODRM_REG_MODE(bRm))
6705 /* register, register */
6706 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6707 + pVCpu->iem.s.idxPrefix], bRm);
6708 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6709}
6710
6711
6712/** Opcode 0x0f 0x72 11/2. */
6713FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6714{
6715// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6716 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6717}
6718
6719
6720/** Opcode 0x66 0x0f 0x72 11/2. */
6721FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6722{
6723// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6724 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6725}
6726
6727
6728/** Opcode 0x0f 0x72 11/4. */
6729FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6730{
6731// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6732 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6733}
6734
6735
6736/** Opcode 0x66 0x0f 0x72 11/4. */
6737FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6738{
6739// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6740 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6741}
6742
6743
6744/** Opcode 0x0f 0x72 11/6. */
6745FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6746{
6747// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6748 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6749}
6750
6751/** Opcode 0x66 0x0f 0x72 11/6. */
6752FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6753{
6754// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6755 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6756}
6757
6758
6759/**
6760 * Group 13 jump table for register variant.
6761 */
6762IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6763{
6764 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6765 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6766 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6767 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6768 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6769 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6770 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6771 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6772};
6773AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6774
6775/** Opcode 0x0f 0x72. */
6776FNIEMOP_DEF(iemOp_Grp13)
6777{
6778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6779 if (IEM_IS_MODRM_REG_MODE(bRm))
6780 /* register, register */
6781 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6782 + pVCpu->iem.s.idxPrefix], bRm);
6783 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6784}
6785
6786
6787/** Opcode 0x0f 0x73 11/2. */
6788FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6789{
6790// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6791 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6792}
6793
6794
6795/** Opcode 0x66 0x0f 0x73 11/2. */
6796FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6797{
6798// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6799 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6800}
6801
6802
6803/** Opcode 0x66 0x0f 0x73 11/3. */
6804FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6805{
6806// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6807 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6808}
6809
6810
6811/** Opcode 0x0f 0x73 11/6. */
6812FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6813{
6814// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6815 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6816}
6817
6818
6819/** Opcode 0x66 0x0f 0x73 11/6. */
6820FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6821{
6822// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6823 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6824}
6825
6826
6827/** Opcode 0x66 0x0f 0x73 11/7. */
6828FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6829{
6830// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6831 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6832}
6833
6834/**
6835 * Group 14 jump table for register variant.
6836 */
6837IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6838{
6839 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6841 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6842 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6844 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6845 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6846 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6847};
6848AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6849
6850
6851/** Opcode 0x0f 0x73. */
6852FNIEMOP_DEF(iemOp_Grp14)
6853{
6854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6855 if (IEM_IS_MODRM_REG_MODE(bRm))
6856 /* register, register */
6857 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6858 + pVCpu->iem.s.idxPrefix], bRm);
6859 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6860}
6861
6862
6863/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6864FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6865{
6866 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6867 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6868}
6869
6870
6871/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6872FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6873{
6874 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6875 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6876}
6877
6878
6879/* Opcode 0xf3 0x0f 0x74 - invalid */
6880/* Opcode 0xf2 0x0f 0x74 - invalid */
6881
6882
6883/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6884FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6885{
6886 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6887 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6888}
6889
6890
6891/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6892FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6893{
6894 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6895 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6896}
6897
6898
6899/* Opcode 0xf3 0x0f 0x75 - invalid */
6900/* Opcode 0xf2 0x0f 0x75 - invalid */
6901
6902
6903/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6904FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6905{
6906 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6907 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6908}
6909
6910
6911/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6912FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6913{
6914 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6915 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6916}
6917
6918
6919/* Opcode 0xf3 0x0f 0x76 - invalid */
6920/* Opcode 0xf2 0x0f 0x76 - invalid */
6921
6922
6923/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6924FNIEMOP_DEF(iemOp_emms)
6925{
6926 IEMOP_MNEMONIC(emms, "emms");
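    /* Note: emms tags all eight x87 registers as empty again so regular
       FPU code can follow MMX code; it touches no data registers, but
       like other MMX instructions it can raise #NM and #MF. */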
6927 IEM_MC_BEGIN(0, 0);
6928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6929 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6930 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6931 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6932 IEM_MC_FPU_FROM_MMX_MODE();
6933 IEM_MC_ADVANCE_RIP_AND_FINISH();
6934 IEM_MC_END();
6935}
6936
6937/* Opcode 0x66 0x0f 0x77 - invalid */
6938/* Opcode 0xf3 0x0f 0x77 - invalid */
6939/* Opcode 0xf2 0x0f 0x77 - invalid */
6940
6941/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6942#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6943FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6944{
6945 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6946 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6947 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6948 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
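    /* VMREAD copies the VMCS field selected by the encoding in Gy into Ey;
       the operand size is fixed by the CPU mode (64-bit in long mode, else
       32-bit) and the decode helpers below reject size/repeat prefixes. */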
6949
6950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6951 if (IEM_IS_MODRM_REG_MODE(bRm))
6952 {
6953 /*
6954 * Register, register.
6955 */
6956 if (enmEffOpSize == IEMMODE_64BIT)
6957 {
6958 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6959 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6960 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6961 IEM_MC_ARG(uint64_t, u64Enc, 1);
6962 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6963 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6964 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6965 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6966 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6967 IEM_MC_END();
6968 }
6969 else
6970 {
6971 IEM_MC_BEGIN(0, 0);
6972 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6973 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6974 IEM_MC_ARG(uint32_t, u32Enc, 1);
6975 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6976 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6977 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6978 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6979 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6980 IEM_MC_END();
6981 }
6982 }
6983 else
6984 {
6985 /*
6986 * Memory, register.
6987 */
6988 if (enmEffOpSize == IEMMODE_64BIT)
6989 {
6990 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6991 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6993 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6994 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6995 IEM_MC_ARG(uint64_t, u64Enc, 2);
6996 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6997 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6998 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6999 IEM_MC_END();
7000 }
7001 else
7002 {
7003 IEM_MC_BEGIN(0, 0);
7004 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7006 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7007 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7008 IEM_MC_ARG(uint32_t, u32Enc, 2);
7009 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7010 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7011 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7012 IEM_MC_END();
7013 }
7014 }
7015}
7016#else
7017FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7018#endif
7019
7020/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7021FNIEMOP_STUB(iemOp_AmdGrp17);
7022/* Opcode 0xf3 0x0f 0x78 - invalid */
7023/* Opcode 0xf2 0x0f 0x78 - invalid */
7024
7025/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7026#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7027FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7028{
7029 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7030 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7031 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7032 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
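 /* Same fixed operand size as vmread above; note the reversed direction: the
    field encoding comes from the ModRM reg field and the value from r/m. */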
7033
7034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7035 if (IEM_IS_MODRM_REG_MODE(bRm))
7036 {
7037 /*
7038 * Register, register.
7039 */
7040 if (enmEffOpSize == IEMMODE_64BIT)
7041 {
7042 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7043 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7044 IEM_MC_ARG(uint64_t, u64Val, 0);
7045 IEM_MC_ARG(uint64_t, u64Enc, 1);
7046 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7047 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7048 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7049 IEM_MC_END();
7050 }
7051 else
7052 {
7053 IEM_MC_BEGIN(0, 0);
7054 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7055 IEM_MC_ARG(uint32_t, u32Val, 0);
7056 IEM_MC_ARG(uint32_t, u32Enc, 1);
7057 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7058 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7059 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7060 IEM_MC_END();
7061 }
7062 }
7063 else
7064 {
7065 /*
7066 * Register, memory.
7067 */
7068 if (enmEffOpSize == IEMMODE_64BIT)
7069 {
7070 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7071 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7073 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7074 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7075 IEM_MC_ARG(uint64_t, u64Enc, 2);
7076 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7077 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7078 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7079 IEM_MC_END();
7080 }
7081 else
7082 {
7083 IEM_MC_BEGIN(0, 0);
7084 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7086 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7087 IEM_MC_ARG(uint32_t, u32Enc, 2);
7088 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7089 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7090 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7091 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7092 IEM_MC_END();
7093 }
7094 }
7095}
7096#else
7097FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7098#endif
7099/* Opcode 0x66 0x0f 0x79 - invalid */
7100/* Opcode 0xf3 0x0f 0x79 - invalid */
7101/* Opcode 0xf2 0x0f 0x79 - invalid */
7102
7103/* Opcode 0x0f 0x7a - invalid */
7104/* Opcode 0x66 0x0f 0x7a - invalid */
7105/* Opcode 0xf3 0x0f 0x7a - invalid */
7106/* Opcode 0xf2 0x0f 0x7a - invalid */
7107
7108/* Opcode 0x0f 0x7b - invalid */
7109/* Opcode 0x66 0x0f 0x7b - invalid */
7110/* Opcode 0xf3 0x0f 0x7b - invalid */
7111/* Opcode 0xf2 0x0f 0x7b - invalid */
7112
7113/* Opcode 0x0f 0x7c - invalid */
7114
7115
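/*
 * SSE3 horizontal add/subtract: these combine adjacent element pairs within
 * each source operand instead of pairing elements vertically across the two
 * sources.
 */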
7116/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7117FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7118{
7119 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7120 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7121}
7122
7123
7124/* Opcode 0xf3 0x0f 0x7c - invalid */
7125
7126
7127/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7128FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7129{
7130 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7131 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7132}
7133
7134
7135/* Opcode 0x0f 0x7d - invalid */
7136
7137
7138/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7139FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7140{
7141 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7142 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7143}
7144
7145
7146/* Opcode 0xf3 0x0f 0x7d - invalid */
7147
7148
7149/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7150FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7151{
7152 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7153 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7154}
7155
7156
7157/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7158FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7159{
7160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
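 /* REX.W selects between the 64-bit movq form and the 32-bit movd form. */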
7161 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7162 {
7163 /**
7164 * @opcode 0x7e
7165 * @opcodesub rex.w=1
7166 * @oppfx none
7167 * @opcpuid mmx
7168 * @opgroup og_mmx_datamove
7169 * @opxcpttype 5
7170 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7171 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7172 */
7173 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7174 if (IEM_IS_MODRM_REG_MODE(bRm))
7175 {
7176 /* greg64, MMX */
7177 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7179 IEM_MC_LOCAL(uint64_t, u64Tmp);
7180
7181 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7182 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7183 IEM_MC_FPU_TO_MMX_MODE();
7184
7185 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7186 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7187
7188 IEM_MC_ADVANCE_RIP_AND_FINISH();
7189 IEM_MC_END();
7190 }
7191 else
7192 {
7193 /* [mem64], MMX */
7194 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7196 IEM_MC_LOCAL(uint64_t, u64Tmp);
7197
7198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7200 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7201 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7202
7203 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7204 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7205 IEM_MC_FPU_TO_MMX_MODE();
7206
7207 IEM_MC_ADVANCE_RIP_AND_FINISH();
7208 IEM_MC_END();
7209 }
7210 }
7211 else
7212 {
7213 /**
7214 * @opdone
7215 * @opcode 0x7e
7216 * @opcodesub rex.w=0
7217 * @oppfx none
7218 * @opcpuid mmx
7219 * @opgroup og_mmx_datamove
7220 * @opxcpttype 5
7221 * @opfunction iemOp_movd_q_Ey_Pd
7222 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7223 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7224 */
7225 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7226 if (IEM_IS_MODRM_REG_MODE(bRm))
7227 {
7228 /* greg32, MMX */
7229 IEM_MC_BEGIN(0, 0);
7230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7231 IEM_MC_LOCAL(uint32_t, u32Tmp);
7232
7233 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7234 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7235 IEM_MC_FPU_TO_MMX_MODE();
7236
7237 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7238 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7239
7240 IEM_MC_ADVANCE_RIP_AND_FINISH();
7241 IEM_MC_END();
7242 }
7243 else
7244 {
7245 /* [mem32], MMX */
7246 IEM_MC_BEGIN(0, 0);
7247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7248 IEM_MC_LOCAL(uint32_t, u32Tmp);
7249
7250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7252 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7253 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7254
7255 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7256 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7257 IEM_MC_FPU_TO_MMX_MODE();
7258
7259 IEM_MC_ADVANCE_RIP_AND_FINISH();
7260 IEM_MC_END();
7261 }
7262 }
7263}
7264
7265
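/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */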
7266FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7267{
7268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7269 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7270 {
7271 /**
7272 * @opcode 0x7e
7273 * @opcodesub rex.w=1
7274 * @oppfx 0x66
7275 * @opcpuid sse2
7276 * @opgroup og_sse2_simdint_datamove
7277 * @opxcpttype 5
7278 * @optest 64-bit / op1=1 op2=2 -> op1=2
7279 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7280 */
7281 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7282 if (IEM_IS_MODRM_REG_MODE(bRm))
7283 {
7284 /* greg64, XMM */
7285 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7287 IEM_MC_LOCAL(uint64_t, u64Tmp);
7288
7289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7290 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7291
7292 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7293 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7294
7295 IEM_MC_ADVANCE_RIP_AND_FINISH();
7296 IEM_MC_END();
7297 }
7298 else
7299 {
7300 /* [mem64], XMM */
7301 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7303 IEM_MC_LOCAL(uint64_t, u64Tmp);
7304
7305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7307 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7308 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7309
7310 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7311 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7312
7313 IEM_MC_ADVANCE_RIP_AND_FINISH();
7314 IEM_MC_END();
7315 }
7316 }
7317 else
7318 {
7319 /**
7320 * @opdone
7321 * @opcode 0x7e
7322 * @opcodesub rex.w=0
7323 * @oppfx 0x66
7324 * @opcpuid sse2
7325 * @opgroup og_sse2_simdint_datamove
7326 * @opxcpttype 5
7327 * @opfunction iemOp_movd_q_Ey_Vy
7328 * @optest op1=1 op2=2 -> op1=2
7329 * @optest op1=0 op2=-42 -> op1=-42
7330 */
7331 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7332 if (IEM_IS_MODRM_REG_MODE(bRm))
7333 {
7334 /* greg32, XMM */
7335 IEM_MC_BEGIN(0, 0);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7337 IEM_MC_LOCAL(uint32_t, u32Tmp);
7338
7339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7341
7342 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7343 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7344
7345 IEM_MC_ADVANCE_RIP_AND_FINISH();
7346 IEM_MC_END();
7347 }
7348 else
7349 {
7350 /* [mem32], XMM */
7351 IEM_MC_BEGIN(0, 0);
7352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7353 IEM_MC_LOCAL(uint32_t, u32Tmp);
7354
7355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7357 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7358 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7359
7360 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7361 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7362
7363 IEM_MC_ADVANCE_RIP_AND_FINISH();
7364 IEM_MC_END();
7365 }
7366 }
7367}
7368
7369/**
7370 * @opcode 0x7e
7371 * @oppfx 0xf3
7372 * @opcpuid sse2
7373 * @opgroup og_sse2_pcksclr_datamove
7374 * @opxcpttype none
7375 * @optest op1=1 op2=2 -> op1=2
7376 * @optest op1=0 op2=-42 -> op1=-42
7377 */
7378FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7379{
7380 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
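 /* The f3-prefixed movq loads 64 bits and zero-extends them into the full
    destination XMM register, hence the *_ZX_U128 stores below. */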
7381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7382 if (IEM_IS_MODRM_REG_MODE(bRm))
7383 {
7384 /*
7385 * XMM128, XMM64.
7386 */
7387 IEM_MC_BEGIN(0, 0);
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7389 IEM_MC_LOCAL(uint64_t, uSrc);
7390
7391 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7392 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7393
7394 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7395 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7396
7397 IEM_MC_ADVANCE_RIP_AND_FINISH();
7398 IEM_MC_END();
7399 }
7400 else
7401 {
7402 /*
7403 * XMM128, [mem64].
7404 */
7405 IEM_MC_BEGIN(0, 0);
7406 IEM_MC_LOCAL(uint64_t, uSrc);
7407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7408
7409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7411 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7412 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7413
7414 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7415 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7416
7417 IEM_MC_ADVANCE_RIP_AND_FINISH();
7418 IEM_MC_END();
7419 }
7420}
7421
7422/* Opcode 0xf2 0x0f 0x7e - invalid */
7423
7424
7425/** Opcode 0x0f 0x7f - movq Qq, Pq */
7426FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7427{
7428 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7430 if (IEM_IS_MODRM_REG_MODE(bRm))
7431 {
7432 /*
7433 * MMX, MMX.
7434 */
7435 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7436 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7437 IEM_MC_BEGIN(0, 0);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7439 IEM_MC_LOCAL(uint64_t, u64Tmp);
7440 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7441 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7442 IEM_MC_FPU_TO_MMX_MODE();
7443
7444 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7445 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7446
7447 IEM_MC_ADVANCE_RIP_AND_FINISH();
7448 IEM_MC_END();
7449 }
7450 else
7451 {
7452 /*
7453 * [mem64], MMX.
7454 */
7455 IEM_MC_BEGIN(0, 0);
7456 IEM_MC_LOCAL(uint64_t, u64Tmp);
7457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7458
7459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7461 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7462 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7463
7464 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7465 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7466 IEM_MC_FPU_TO_MMX_MODE();
7467
7468 IEM_MC_ADVANCE_RIP_AND_FINISH();
7469 IEM_MC_END();
7470 }
7471}
7472
7473/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7474FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7475{
7476 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7478 if (IEM_IS_MODRM_REG_MODE(bRm))
7479 {
7480 /*
7481 * XMM, XMM.
7482 */
7483 IEM_MC_BEGIN(0, 0);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7487 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7488 IEM_GET_MODRM_REG(pVCpu, bRm));
7489 IEM_MC_ADVANCE_RIP_AND_FINISH();
7490 IEM_MC_END();
7491 }
7492 else
7493 {
7494 /*
7495 * [mem128], XMM.
7496 */
7497 IEM_MC_BEGIN(0, 0);
7498 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7500
7501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7505
7506 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7508
7509 IEM_MC_ADVANCE_RIP_AND_FINISH();
7510 IEM_MC_END();
7511 }
7512}
7513
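/* Unlike movdqa above, movdqu performs no 16-byte alignment check on the
   memory operand (IEM_MC_STORE_MEM_U128_NO_AC vs IEM_MC_STORE_MEM_U128_ALIGN_SSE). */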
7514/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7515FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7516{
7517 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7519 if (IEM_IS_MODRM_REG_MODE(bRm))
7520 {
7521 /*
7522 * XMM, XMM.
7523 */
7524 IEM_MC_BEGIN(0, 0);
7525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7526 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7528 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7529 IEM_GET_MODRM_REG(pVCpu, bRm));
7530 IEM_MC_ADVANCE_RIP_AND_FINISH();
7531 IEM_MC_END();
7532 }
7533 else
7534 {
7535 /*
7536 * [mem128], XMM.
7537 */
7538 IEM_MC_BEGIN(0, 0);
7539 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7541
7542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7546
7547 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7548 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7549
7550 IEM_MC_ADVANCE_RIP_AND_FINISH();
7551 IEM_MC_END();
7552 }
7553}
7554
7555/* Opcode 0xf2 0x0f 0x7f - invalid */
7556
7557
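/*
 * Jcc rel16/rel32 (0x0f 0x80 thru 0x8f). These all follow the same pattern:
 * fetch the signed displacement matching the effective operand size, then
 * branch on the relevant EFLAGS bits; the *_AND_FINISH microcode statements
 * take care of the RIP update on both the taken and not-taken paths.
 */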
7558/**
7559 * @opcode 0x80
7560 * @opfltest of
7561 */
7562FNIEMOP_DEF(iemOp_jo_Jv)
7563{
7564 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7565 IEMOP_HLP_MIN_386();
7566 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7567 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7568 {
7569 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7570 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7573 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7574 } IEM_MC_ELSE() {
7575 IEM_MC_ADVANCE_RIP_AND_FINISH();
7576 } IEM_MC_ENDIF();
7577 IEM_MC_END();
7578 }
7579 else
7580 {
7581 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7582 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7584 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7585 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7586 } IEM_MC_ELSE() {
7587 IEM_MC_ADVANCE_RIP_AND_FINISH();
7588 } IEM_MC_ENDIF();
7589 IEM_MC_END();
7590 }
7591}
7592
7593
7594/**
7595 * @opcode 0x81
7596 * @opfltest of
7597 */
7598FNIEMOP_DEF(iemOp_jno_Jv)
7599{
7600 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7601 IEMOP_HLP_MIN_386();
7602 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7603 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7604 {
7605 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7606 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7609 IEM_MC_ADVANCE_RIP_AND_FINISH();
7610 } IEM_MC_ELSE() {
7611 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7612 } IEM_MC_ENDIF();
7613 IEM_MC_END();
7614 }
7615 else
7616 {
7617 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7618 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7620 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7621 IEM_MC_ADVANCE_RIP_AND_FINISH();
7622 } IEM_MC_ELSE() {
7623 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7624 } IEM_MC_ENDIF();
7625 IEM_MC_END();
7626 }
7627}
7628
7629
7630/**
7631 * @opcode 0x82
7632 * @opfltest cf
7633 */
7634FNIEMOP_DEF(iemOp_jc_Jv)
7635{
7636 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7637 IEMOP_HLP_MIN_386();
7638 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7639 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7640 {
7641 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7642 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7644 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7645 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7646 } IEM_MC_ELSE() {
7647 IEM_MC_ADVANCE_RIP_AND_FINISH();
7648 } IEM_MC_ENDIF();
7649 IEM_MC_END();
7650 }
7651 else
7652 {
7653 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7654 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7657 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7658 } IEM_MC_ELSE() {
7659 IEM_MC_ADVANCE_RIP_AND_FINISH();
7660 } IEM_MC_ENDIF();
7661 IEM_MC_END();
7662 }
7663}
7664
7665
7666/**
7667 * @opcode 0x83
7668 * @opfltest cf
7669 */
7670FNIEMOP_DEF(iemOp_jnc_Jv)
7671{
7672 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7673 IEMOP_HLP_MIN_386();
7674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7675 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7676 {
7677 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7678 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7680 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7681 IEM_MC_ADVANCE_RIP_AND_FINISH();
7682 } IEM_MC_ELSE() {
7683 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7684 } IEM_MC_ENDIF();
7685 IEM_MC_END();
7686 }
7687 else
7688 {
7689 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7690 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7693 IEM_MC_ADVANCE_RIP_AND_FINISH();
7694 } IEM_MC_ELSE() {
7695 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7696 } IEM_MC_ENDIF();
7697 IEM_MC_END();
7698 }
7699}
7700
7701
7702/**
7703 * @opcode 0x84
7704 * @opfltest zf
7705 */
7706FNIEMOP_DEF(iemOp_je_Jv)
7707{
7708 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7709 IEMOP_HLP_MIN_386();
7710 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7711 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7712 {
7713 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7714 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7717 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7718 } IEM_MC_ELSE() {
7719 IEM_MC_ADVANCE_RIP_AND_FINISH();
7720 } IEM_MC_ENDIF();
7721 IEM_MC_END();
7722 }
7723 else
7724 {
7725 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7726 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7728 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7729 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7730 } IEM_MC_ELSE() {
7731 IEM_MC_ADVANCE_RIP_AND_FINISH();
7732 } IEM_MC_ENDIF();
7733 IEM_MC_END();
7734 }
7735}
7736
7737
7738/**
7739 * @opcode 0x85
7740 * @opfltest zf
7741 */
7742FNIEMOP_DEF(iemOp_jne_Jv)
7743{
7744 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7745 IEMOP_HLP_MIN_386();
7746 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7747 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7748 {
7749 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7750 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7752 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7753 IEM_MC_ADVANCE_RIP_AND_FINISH();
7754 } IEM_MC_ELSE() {
7755 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7756 } IEM_MC_ENDIF();
7757 IEM_MC_END();
7758 }
7759 else
7760 {
7761 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7762 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7764 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7765 IEM_MC_ADVANCE_RIP_AND_FINISH();
7766 } IEM_MC_ELSE() {
7767 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7768 } IEM_MC_ENDIF();
7769 IEM_MC_END();
7770 }
7771}
7772
7773
7774/**
7775 * @opcode 0x86
7776 * @opfltest cf,zf
7777 */
7778FNIEMOP_DEF(iemOp_jbe_Jv)
7779{
7780 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7781 IEMOP_HLP_MIN_386();
7782 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7783 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7784 {
7785 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7786 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7789 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7790 } IEM_MC_ELSE() {
7791 IEM_MC_ADVANCE_RIP_AND_FINISH();
7792 } IEM_MC_ENDIF();
7793 IEM_MC_END();
7794 }
7795 else
7796 {
7797 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7798 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7800 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7801 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7802 } IEM_MC_ELSE() {
7803 IEM_MC_ADVANCE_RIP_AND_FINISH();
7804 } IEM_MC_ENDIF();
7805 IEM_MC_END();
7806 }
7807}
7808
7809
7810/**
7811 * @opcode 0x87
7812 * @opfltest cf,zf
7813 */
7814FNIEMOP_DEF(iemOp_jnbe_Jv)
7815{
7816 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7817 IEMOP_HLP_MIN_386();
7818 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7819 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7820 {
7821 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7822 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7824 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7825 IEM_MC_ADVANCE_RIP_AND_FINISH();
7826 } IEM_MC_ELSE() {
7827 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7828 } IEM_MC_ENDIF();
7829 IEM_MC_END();
7830 }
7831 else
7832 {
7833 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7834 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7836 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7837 IEM_MC_ADVANCE_RIP_AND_FINISH();
7838 } IEM_MC_ELSE() {
7839 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7840 } IEM_MC_ENDIF();
7841 IEM_MC_END();
7842 }
7843}
7844
7845
7846/**
7847 * @opcode 0x88
7848 * @opfltest sf
7849 */
7850FNIEMOP_DEF(iemOp_js_Jv)
7851{
7852 IEMOP_MNEMONIC(js_Jv, "js Jv");
7853 IEMOP_HLP_MIN_386();
7854 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7855 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7856 {
7857 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7858 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7861 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7862 } IEM_MC_ELSE() {
7863 IEM_MC_ADVANCE_RIP_AND_FINISH();
7864 } IEM_MC_ENDIF();
7865 IEM_MC_END();
7866 }
7867 else
7868 {
7869 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7870 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7872 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7873 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7874 } IEM_MC_ELSE() {
7875 IEM_MC_ADVANCE_RIP_AND_FINISH();
7876 } IEM_MC_ENDIF();
7877 IEM_MC_END();
7878 }
7879}
7880
7881
7882/**
7883 * @opcode 0x89
7884 * @opfltest sf
7885 */
7886FNIEMOP_DEF(iemOp_jns_Jv)
7887{
7888 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7889 IEMOP_HLP_MIN_386();
7890 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7891 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7892 {
7893 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7894 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7896 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7897 IEM_MC_ADVANCE_RIP_AND_FINISH();
7898 } IEM_MC_ELSE() {
7899 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7900 } IEM_MC_ENDIF();
7901 IEM_MC_END();
7902 }
7903 else
7904 {
7905 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7906 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7908 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7909 IEM_MC_ADVANCE_RIP_AND_FINISH();
7910 } IEM_MC_ELSE() {
7911 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7912 } IEM_MC_ENDIF();
7913 IEM_MC_END();
7914 }
7915}
7916
7917
7918/**
7919 * @opcode 0x8a
7920 * @opfltest pf
7921 */
7922FNIEMOP_DEF(iemOp_jp_Jv)
7923{
7924 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7925 IEMOP_HLP_MIN_386();
7926 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7927 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7928 {
7929 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7930 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7933 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7934 } IEM_MC_ELSE() {
7935 IEM_MC_ADVANCE_RIP_AND_FINISH();
7936 } IEM_MC_ENDIF();
7937 IEM_MC_END();
7938 }
7939 else
7940 {
7941 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7942 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7944 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7945 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7946 } IEM_MC_ELSE() {
7947 IEM_MC_ADVANCE_RIP_AND_FINISH();
7948 } IEM_MC_ENDIF();
7949 IEM_MC_END();
7950 }
7951}
7952
7953
7954/**
7955 * @opcode 0x8b
7956 * @opfltest pf
7957 */
7958FNIEMOP_DEF(iemOp_jnp_Jv)
7959{
7960 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7961 IEMOP_HLP_MIN_386();
7962 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7963 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7964 {
7965 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7966 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7969 IEM_MC_ADVANCE_RIP_AND_FINISH();
7970 } IEM_MC_ELSE() {
7971 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7972 } IEM_MC_ENDIF();
7973 IEM_MC_END();
7974 }
7975 else
7976 {
7977 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7978 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7980 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7981 IEM_MC_ADVANCE_RIP_AND_FINISH();
7982 } IEM_MC_ELSE() {
7983 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7984 } IEM_MC_ENDIF();
7985 IEM_MC_END();
7986 }
7987}
7988
7989
7990/**
7991 * @opcode 0x8c
7992 * @opfltest sf,of
7993 */
7994FNIEMOP_DEF(iemOp_jl_Jv)
7995{
7996 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7997 IEMOP_HLP_MIN_386();
7998 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
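 /* 'jl' (signed less): taken when SF != OF. */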
7999 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8000 {
8001 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8002 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8004 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8005 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8006 } IEM_MC_ELSE() {
8007 IEM_MC_ADVANCE_RIP_AND_FINISH();
8008 } IEM_MC_ENDIF();
8009 IEM_MC_END();
8010 }
8011 else
8012 {
8013 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8014 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8016 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8017 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8018 } IEM_MC_ELSE() {
8019 IEM_MC_ADVANCE_RIP_AND_FINISH();
8020 } IEM_MC_ENDIF();
8021 IEM_MC_END();
8022 }
8023}
8024
8025
8026/**
8027 * @opcode 0x8d
8028 * @opfltest sf,of
8029 */
8030FNIEMOP_DEF(iemOp_jnl_Jv)
8031{
8032 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8033 IEMOP_HLP_MIN_386();
8034 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8035 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8036 {
8037 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8038 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8040 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8041 IEM_MC_ADVANCE_RIP_AND_FINISH();
8042 } IEM_MC_ELSE() {
8043 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8044 } IEM_MC_ENDIF();
8045 IEM_MC_END();
8046 }
8047 else
8048 {
8049 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8050 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8052 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8053 IEM_MC_ADVANCE_RIP_AND_FINISH();
8054 } IEM_MC_ELSE() {
8055 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8056 } IEM_MC_ENDIF();
8057 IEM_MC_END();
8058 }
8059}
8060
8061
8062/**
8063 * @opcode 0x8e
8064 * @opfltest zf,sf,of
8065 */
8066FNIEMOP_DEF(iemOp_jle_Jv)
8067{
8068 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8069 IEMOP_HLP_MIN_386();
8070 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
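 /* 'jle' (signed less or equal): taken when ZF is set or SF != OF. */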
8071 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8072 {
8073 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8074 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8076 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8077 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8078 } IEM_MC_ELSE() {
8079 IEM_MC_ADVANCE_RIP_AND_FINISH();
8080 } IEM_MC_ENDIF();
8081 IEM_MC_END();
8082 }
8083 else
8084 {
8085 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8086 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8088 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8089 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8090 } IEM_MC_ELSE() {
8091 IEM_MC_ADVANCE_RIP_AND_FINISH();
8092 } IEM_MC_ENDIF();
8093 IEM_MC_END();
8094 }
8095}
8096
8097
8098/**
8099 * @opcode 0x8f
8100 * @opfltest zf,sf,of
8101 */
8102FNIEMOP_DEF(iemOp_jnle_Jv)
8103{
8104 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8105 IEMOP_HLP_MIN_386();
8106 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8107 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8108 {
8109 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8110 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8112 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8113 IEM_MC_ADVANCE_RIP_AND_FINISH();
8114 } IEM_MC_ELSE() {
8115 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8116 } IEM_MC_ENDIF();
8117 IEM_MC_END();
8118 }
8119 else
8120 {
8121 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8122 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8124 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8125 IEM_MC_ADVANCE_RIP_AND_FINISH();
8126 } IEM_MC_ELSE() {
8127 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8128 } IEM_MC_ENDIF();
8129 IEM_MC_END();
8130 }
8131}
8132
8133
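/*
 * SETcc r/m8 (0x0f 0x90 thru 0x9f). Same conditions as the Jcc family above,
 * but instead of branching these store 1 (condition met) or 0 into the byte
 * register or memory operand.
 */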
8134/**
8135 * @opcode 0x90
8136 * @opfltest of
8137 */
8138FNIEMOP_DEF(iemOp_seto_Eb)
8139{
8140 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8141 IEMOP_HLP_MIN_386();
8142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8143
8144 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8145 * any way. AMD says it's "unused", whatever that means. We're
8146 * ignoring for now. */
8147 if (IEM_IS_MODRM_REG_MODE(bRm))
8148 {
8149 /* register target */
8150 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8152 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8153 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8154 } IEM_MC_ELSE() {
8155 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8156 } IEM_MC_ENDIF();
8157 IEM_MC_ADVANCE_RIP_AND_FINISH();
8158 IEM_MC_END();
8159 }
8160 else
8161 {
8162 /* memory target */
8163 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8167 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8169 } IEM_MC_ELSE() {
8170 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8171 } IEM_MC_ENDIF();
8172 IEM_MC_ADVANCE_RIP_AND_FINISH();
8173 IEM_MC_END();
8174 }
8175}
8176
8177
8178/**
8179 * @opcode 0x91
8180 * @opfltest of
8181 */
8182FNIEMOP_DEF(iemOp_setno_Eb)
8183{
8184 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8185 IEMOP_HLP_MIN_386();
8186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8187
8188 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8189 * any way. AMD says it's "unused", whatever that means. We're
8190 * ignoring for now. */
8191 if (IEM_IS_MODRM_REG_MODE(bRm))
8192 {
8193 /* register target */
8194 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8196 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8197 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8198 } IEM_MC_ELSE() {
8199 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8200 } IEM_MC_ENDIF();
8201 IEM_MC_ADVANCE_RIP_AND_FINISH();
8202 IEM_MC_END();
8203 }
8204 else
8205 {
8206 /* memory target */
8207 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8211 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8212 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8213 } IEM_MC_ELSE() {
8214 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8215 } IEM_MC_ENDIF();
8216 IEM_MC_ADVANCE_RIP_AND_FINISH();
8217 IEM_MC_END();
8218 }
8219}
8220
8221
8222/**
8223 * @opcode 0x92
8224 * @opfltest cf
8225 */
8226FNIEMOP_DEF(iemOp_setc_Eb)
8227{
8228 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8229 IEMOP_HLP_MIN_386();
8230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8231
8232 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8233 * any way. AMD says it's "unused", whatever that means. We're
8234 * ignoring for now. */
8235 if (IEM_IS_MODRM_REG_MODE(bRm))
8236 {
8237 /* register target */
8238 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8240 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8241 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8242 } IEM_MC_ELSE() {
8243 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8244 } IEM_MC_ENDIF();
8245 IEM_MC_ADVANCE_RIP_AND_FINISH();
8246 IEM_MC_END();
8247 }
8248 else
8249 {
8250 /* memory target */
8251 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8255 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8256 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8257 } IEM_MC_ELSE() {
8258 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8259 } IEM_MC_ENDIF();
8260 IEM_MC_ADVANCE_RIP_AND_FINISH();
8261 IEM_MC_END();
8262 }
8263}
8264
8265
8266/**
8267 * @opcode 0x93
8268 * @opfltest cf
8269 */
8270FNIEMOP_DEF(iemOp_setnc_Eb)
8271{
8272 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8273 IEMOP_HLP_MIN_386();
8274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8275
8276 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8277 * any way. AMD says it's "unused", whatever that means. We're
8278 * ignoring for now. */
8279 if (IEM_IS_MODRM_REG_MODE(bRm))
8280 {
8281 /* register target */
8282 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8284 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8285 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8286 } IEM_MC_ELSE() {
8287 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8288 } IEM_MC_ENDIF();
8289 IEM_MC_ADVANCE_RIP_AND_FINISH();
8290 IEM_MC_END();
8291 }
8292 else
8293 {
8294 /* memory target */
8295 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8299 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8300 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8301 } IEM_MC_ELSE() {
8302 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8303 } IEM_MC_ENDIF();
8304 IEM_MC_ADVANCE_RIP_AND_FINISH();
8305 IEM_MC_END();
8306 }
8307}
8308
8309
8310/**
8311 * @opcode 0x94
8312 * @opfltest zf
8313 */
8314FNIEMOP_DEF(iemOp_sete_Eb)
8315{
8316 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8317 IEMOP_HLP_MIN_386();
8318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8319
8320 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8321 * any way. AMD says it's "unused", whatever that means. We're
8322 * ignoring for now. */
8323 if (IEM_IS_MODRM_REG_MODE(bRm))
8324 {
8325 /* register target */
8326 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8329 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8330 } IEM_MC_ELSE() {
8331 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8332 } IEM_MC_ENDIF();
8333 IEM_MC_ADVANCE_RIP_AND_FINISH();
8334 IEM_MC_END();
8335 }
8336 else
8337 {
8338 /* memory target */
8339 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8344 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8345 } IEM_MC_ELSE() {
8346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8347 } IEM_MC_ENDIF();
8348 IEM_MC_ADVANCE_RIP_AND_FINISH();
8349 IEM_MC_END();
8350 }
8351}
8352
8353
8354/**
8355 * @opcode 0x95
8356 * @opfltest zf
8357 */
8358FNIEMOP_DEF(iemOp_setne_Eb)
8359{
8360 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8361 IEMOP_HLP_MIN_386();
8362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8363
8364 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8365 * any way. AMD says it's "unused", whatever that means. We're
8366 * ignoring for now. */
8367 if (IEM_IS_MODRM_REG_MODE(bRm))
8368 {
8369 /* register target */
8370 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8373 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8374 } IEM_MC_ELSE() {
8375 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8376 } IEM_MC_ENDIF();
8377 IEM_MC_ADVANCE_RIP_AND_FINISH();
8378 IEM_MC_END();
8379 }
8380 else
8381 {
8382 /* memory target */
8383 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8388 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8389 } IEM_MC_ELSE() {
8390 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8391 } IEM_MC_ENDIF();
8392 IEM_MC_ADVANCE_RIP_AND_FINISH();
8393 IEM_MC_END();
8394 }
8395}
8396
8397
8398/**
8399 * @opcode 0x96
8400 * @opfltest cf,zf
8401 */
8402FNIEMOP_DEF(iemOp_setbe_Eb)
8403{
8404 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8405 IEMOP_HLP_MIN_386();
8406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8407
8408 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8409 * any way. AMD says it's "unused", whatever that means. We're
8410 * ignoring for now. */
8411 if (IEM_IS_MODRM_REG_MODE(bRm))
8412 {
8413 /* register target */
8414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8417 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8418 } IEM_MC_ELSE() {
8419 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8420 } IEM_MC_ENDIF();
8421 IEM_MC_ADVANCE_RIP_AND_FINISH();
8422 IEM_MC_END();
8423 }
8424 else
8425 {
8426 /* memory target */
8427 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8431 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8433 } IEM_MC_ELSE() {
8434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8435 } IEM_MC_ENDIF();
8436 IEM_MC_ADVANCE_RIP_AND_FINISH();
8437 IEM_MC_END();
8438 }
8439}
8440
8441
8442/**
8443 * @opcode 0x97
8444 * @opfltest cf,zf
8445 */
8446FNIEMOP_DEF(iemOp_setnbe_Eb)
8447{
8448 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8449 IEMOP_HLP_MIN_386();
8450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8451
8452 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8453 * any way. AMD says it's "unused", whatever that means. We're
8454 * ignoring for now. */
8455 if (IEM_IS_MODRM_REG_MODE(bRm))
8456 {
8457 /* register target */
8458 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8460 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8461 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8462 } IEM_MC_ELSE() {
8463 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8464 } IEM_MC_ENDIF();
8465 IEM_MC_ADVANCE_RIP_AND_FINISH();
8466 IEM_MC_END();
8467 }
8468 else
8469 {
8470 /* memory target */
8471 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8475 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8476 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8477 } IEM_MC_ELSE() {
8478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8479 } IEM_MC_ENDIF();
8480 IEM_MC_ADVANCE_RIP_AND_FINISH();
8481 IEM_MC_END();
8482 }
8483}
8484
8485
8486/**
8487 * @opcode 0x98
8488 * @opfltest sf
8489 */
8490FNIEMOP_DEF(iemOp_sets_Eb)
8491{
8492 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8493 IEMOP_HLP_MIN_386();
8494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8495
8496 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8497 * any way. AMD says it's "unused", whatever that means. We're
8498 * ignoring for now. */
8499 if (IEM_IS_MODRM_REG_MODE(bRm))
8500 {
8501 /* register target */
8502 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8504 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8505 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8506 } IEM_MC_ELSE() {
8507 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8508 } IEM_MC_ENDIF();
8509 IEM_MC_ADVANCE_RIP_AND_FINISH();
8510 IEM_MC_END();
8511 }
8512 else
8513 {
8514 /* memory target */
8515 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8521 } IEM_MC_ELSE() {
8522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8523 } IEM_MC_ENDIF();
8524 IEM_MC_ADVANCE_RIP_AND_FINISH();
8525 IEM_MC_END();
8526 }
8527}
8528
8529
8530/**
8531 * @opcode 0x99
8532 * @opfltest sf
8533 */
8534FNIEMOP_DEF(iemOp_setns_Eb)
8535{
8536 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8537 IEMOP_HLP_MIN_386();
8538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8539
8540 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8541 * any way. AMD says it's "unused", whatever that means. We're
8542 * ignoring for now. */
8543 if (IEM_IS_MODRM_REG_MODE(bRm))
8544 {
8545 /* register target */
8546 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8548 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8549 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8550 } IEM_MC_ELSE() {
8551 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8552 } IEM_MC_ENDIF();
8553 IEM_MC_ADVANCE_RIP_AND_FINISH();
8554 IEM_MC_END();
8555 }
8556 else
8557 {
8558 /* memory target */
8559 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8563 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8565 } IEM_MC_ELSE() {
8566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8567 } IEM_MC_ENDIF();
8568 IEM_MC_ADVANCE_RIP_AND_FINISH();
8569 IEM_MC_END();
8570 }
8571}
8572
8573
8574/**
8575 * @opcode 0x9a
8576 * @opfltest pf
8577 */
8578FNIEMOP_DEF(iemOp_setp_Eb)
8579{
8580 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8581 IEMOP_HLP_MIN_386();
8582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8583
8584 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8585 * any way. AMD says it's "unused", whatever that means. We're
8586 * ignoring for now. */
8587 if (IEM_IS_MODRM_REG_MODE(bRm))
8588 {
8589 /* register target */
8590 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8592 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8593 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8594 } IEM_MC_ELSE() {
8595 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8596 } IEM_MC_ENDIF();
8597 IEM_MC_ADVANCE_RIP_AND_FINISH();
8598 IEM_MC_END();
8599 }
8600 else
8601 {
8602 /* memory target */
8603 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8607 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8609 } IEM_MC_ELSE() {
8610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8611 } IEM_MC_ENDIF();
8612 IEM_MC_ADVANCE_RIP_AND_FINISH();
8613 IEM_MC_END();
8614 }
8615}
8616
8617
8618/**
8619 * @opcode 0x9b
8620 * @opfltest pf
8621 */
8622FNIEMOP_DEF(iemOp_setnp_Eb)
8623{
8624 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8625 IEMOP_HLP_MIN_386();
8626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8627
8628 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8629 * any way. AMD says it's "unused", whatever that means. We're
8630 * ignoring for now. */
8631 if (IEM_IS_MODRM_REG_MODE(bRm))
8632 {
8633 /* register target */
8634 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8637 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8638 } IEM_MC_ELSE() {
8639 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8640 } IEM_MC_ENDIF();
8641 IEM_MC_ADVANCE_RIP_AND_FINISH();
8642 IEM_MC_END();
8643 }
8644 else
8645 {
8646 /* memory target */
8647 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8653 } IEM_MC_ELSE() {
8654 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8655 } IEM_MC_ENDIF();
8656 IEM_MC_ADVANCE_RIP_AND_FINISH();
8657 IEM_MC_END();
8658 }
8659}
8660
8661
8662/**
8663 * @opcode 0x9c
8664 * @opfltest sf,of
8665 */
8666FNIEMOP_DEF(iemOp_setl_Eb)
8667{
8668 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8669 IEMOP_HLP_MIN_386();
8670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8671
8672 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8673 * any way. AMD says it's "unused", whatever that means. We're
8674 * ignoring for now. */
8675 if (IEM_IS_MODRM_REG_MODE(bRm))
8676 {
8677 /* register target */
8678 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8680 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8681 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8682 } IEM_MC_ELSE() {
8683 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8684 } IEM_MC_ENDIF();
8685 IEM_MC_ADVANCE_RIP_AND_FINISH();
8686 IEM_MC_END();
8687 }
8688 else
8689 {
8690 /* memory target */
8691 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8695 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8696 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8697 } IEM_MC_ELSE() {
8698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8699 } IEM_MC_ENDIF();
8700 IEM_MC_ADVANCE_RIP_AND_FINISH();
8701 IEM_MC_END();
8702 }
8703}
8704
8705
8706/**
8707 * @opcode 0x9d
8708 * @opfltest sf,of
8709 */
8710FNIEMOP_DEF(iemOp_setnl_Eb)
8711{
8712 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8713 IEMOP_HLP_MIN_386();
8714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8715
8716 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8717 * any way. AMD says it's "unused", whatever that means. We're
8718 * ignoring for now. */
8719 if (IEM_IS_MODRM_REG_MODE(bRm))
8720 {
8721 /* register target */
8722 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8724 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8725 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8726 } IEM_MC_ELSE() {
8727 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8728 } IEM_MC_ENDIF();
8729 IEM_MC_ADVANCE_RIP_AND_FINISH();
8730 IEM_MC_END();
8731 }
8732 else
8733 {
8734 /* memory target */
8735 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8739 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8741 } IEM_MC_ELSE() {
8742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8743 } IEM_MC_ENDIF();
8744 IEM_MC_ADVANCE_RIP_AND_FINISH();
8745 IEM_MC_END();
8746 }
8747}
8748
8749
8750/**
8751 * @opcode 0x9e
8752 * @opfltest zf,sf,of
8753 */
8754FNIEMOP_DEF(iemOp_setle_Eb)
8755{
8756 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8757 IEMOP_HLP_MIN_386();
8758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8759
8760 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8761 * any way. AMD says it's "unused", whatever that means. We're
8762 * ignoring for now. */
8763 if (IEM_IS_MODRM_REG_MODE(bRm))
8764 {
8765 /* register target */
8766 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8768 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8769 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8770 } IEM_MC_ELSE() {
8771 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8772 } IEM_MC_ENDIF();
8773 IEM_MC_ADVANCE_RIP_AND_FINISH();
8774 IEM_MC_END();
8775 }
8776 else
8777 {
8778 /* memory target */
8779 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8783 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8785 } IEM_MC_ELSE() {
8786 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8787 } IEM_MC_ENDIF();
8788 IEM_MC_ADVANCE_RIP_AND_FINISH();
8789 IEM_MC_END();
8790 }
8791}
8792
8793
8794/**
8795 * @opcode 0x9f
8796 * @opfltest zf,sf,of
8797 */
8798FNIEMOP_DEF(iemOp_setnle_Eb)
8799{
8800 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8801 IEMOP_HLP_MIN_386();
8802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8803
8804 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8805 * any way. AMD says it's "unused", whatever that means. We're
8806 * ignoring for now. */
8807 if (IEM_IS_MODRM_REG_MODE(bRm))
8808 {
8809 /* register target */
8810 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8812 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8813 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8814 } IEM_MC_ELSE() {
8815 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8816 } IEM_MC_ENDIF();
8817 IEM_MC_ADVANCE_RIP_AND_FINISH();
8818 IEM_MC_END();
8819 }
8820 else
8821 {
8822 /* memory target */
8823 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8827 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8828 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8829 } IEM_MC_ELSE() {
8830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8831 } IEM_MC_ENDIF();
8832 IEM_MC_ADVANCE_RIP_AND_FINISH();
8833 IEM_MC_END();
8834 }
8835}
8836
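/*
 * Note: the SETcc workers above all reduce to evaluating a condition on
 * EFLAGS and storing a constant 0 or 1 into the 8-bit destination.  As a
 * rough, self-contained sketch of the 'setle' condition (ZF=1 or SF<>OF);
 * the helper and MY_* constants below are illustrative only and not part
 * of the decoder (the real flag definitions live in x86.h):
 *
 * @code
 *  #include <stdbool.h>
 *  #include <stdint.h>
 *
 *  #define MY_EFL_ZF UINT32_C(0x0040)  // bit 6
 *  #define MY_EFL_SF UINT32_C(0x0080)  // bit 7
 *  #define MY_EFL_OF UINT32_C(0x0800)  // bit 11
 *
 *  static uint8_t mySetleValue(uint32_t fEFlags)
 *  {
 *      bool const fZf = (fEFlags & MY_EFL_ZF) != 0;
 *      bool const fSf = (fEFlags & MY_EFL_SF) != 0;
 *      bool const fOf = (fEFlags & MY_EFL_OF) != 0;
 *      return (uint8_t)(fZf || fSf != fOf ? 1 : 0); // setle; setnle stores the inverse
 *  }
 * @endcode
 */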
8837
8838/** Opcode 0x0f 0xa0. */
8839FNIEMOP_DEF(iemOp_push_fs)
8840{
8841 IEMOP_MNEMONIC(push_fs, "push fs");
8842 IEMOP_HLP_MIN_386();
8843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8844 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8845}
8846
8847
8848/** Opcode 0x0f 0xa1. */
8849FNIEMOP_DEF(iemOp_pop_fs)
8850{
8851 IEMOP_MNEMONIC(pop_fs, "pop fs");
8852 IEMOP_HLP_MIN_386();
8853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8854 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8855 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8856 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8857 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8858 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8859 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8860 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8861 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8862}
8863
8864
8865/** Opcode 0x0f 0xa2. */
8866FNIEMOP_DEF(iemOp_cpuid)
8867{
8868 IEMOP_MNEMONIC(cpuid, "cpuid");
8869 IEMOP_HLP_MIN_486(); /* Not wholly accurate: some 486s lack CPUID. */
8870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8871 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8872 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8873 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8874 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8875 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8876 iemCImpl_cpuid);
8877}
8878
8879
8880/**
8881 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8882 * iemOp_bts_Ev_Gv.
8883 */
8884
8885#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8887 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8888 \
8889 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8890 { \
8891 /* register destination. */ \
8892 switch (pVCpu->iem.s.enmEffOpSize) \
8893 { \
8894 case IEMMODE_16BIT: \
8895 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8897 \
8898 IEM_MC_ARG(uint16_t, u16Src, 2); \
8899 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8900 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8901 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8902 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8903 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8904 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8905 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8906 \
8907 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8908 IEM_MC_END(); \
8909 break; \
8910 \
8911 case IEMMODE_32BIT: \
8912 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8914 \
8915 IEM_MC_ARG(uint32_t, u32Src, 2); \
8916 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8917 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8918 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8919 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8920 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8921 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8922 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8923 \
8924 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8926 IEM_MC_END(); \
8927 break; \
8928 \
8929 case IEMMODE_64BIT: \
8930 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8932 \
8933 IEM_MC_ARG(uint64_t, u64Src, 2); \
8934 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8935 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8936 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8937 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8938 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8939 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8940 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8941 \
8942 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8943 IEM_MC_END(); \
8944 break; \
8945 \
8946 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8947 } \
8948 } \
8949 else \
8950 { \
8951 /* memory destination. */ \
8952 /** @todo test negative bit offsets! */ \
8953 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8954 { \
8955 switch (pVCpu->iem.s.enmEffOpSize) \
8956 { \
8957 case IEMMODE_16BIT: \
8958 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8961 IEMOP_HLP_DONE_DECODING(); \
8962 \
8963 IEM_MC_ARG(uint16_t, u16Src, 2); \
8964 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8965 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8966 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8967 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8968 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8969 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8970 \
8971 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8972 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8973 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8974 \
8975 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8976 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8977 \
8978 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8979 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8980 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8981 IEM_MC_END(); \
8982 break; \
8983 \
8984 case IEMMODE_32BIT: \
8985 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8988 IEMOP_HLP_DONE_DECODING(); \
8989 \
8990 IEM_MC_ARG(uint32_t, u32Src, 2); \
8991 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8992 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8993 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8994 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8995 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8996 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8997 \
8998 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8999 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9000 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9001 \
9002 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9003 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9004 \
9005 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9006 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9007 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9008 IEM_MC_END(); \
9009 break; \
9010 \
9011 case IEMMODE_64BIT: \
9012 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9015 IEMOP_HLP_DONE_DECODING(); \
9016 \
9017 IEM_MC_ARG(uint64_t, u64Src, 2); \
9018 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9019 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9020 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9021 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9022 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9023 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9024 \
9025 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9026 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9027 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9028 \
9029 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9030 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9031 \
9032 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9033 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9034 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9035 IEM_MC_END(); \
9036 break; \
9037 \
9038 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9039 } \
9040 } \
9041 else \
9042 { \
9043 (void)0
9044/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9045#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9046 switch (pVCpu->iem.s.enmEffOpSize) \
9047 { \
9048 case IEMMODE_16BIT: \
9049 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9052 IEMOP_HLP_DONE_DECODING(); \
9053 \
9054 IEM_MC_ARG(uint16_t, u16Src, 2); \
9055 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9056 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9057 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9058 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9059 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9060 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9061 \
9062 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9063 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9064 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9065 \
9066 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9067 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9068 \
9069 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9070 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9071 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9072 IEM_MC_END(); \
9073 break; \
9074 \
9075 case IEMMODE_32BIT: \
9076 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9079 IEMOP_HLP_DONE_DECODING(); \
9080 \
9081 IEM_MC_ARG(uint32_t, u32Src, 2); \
9082 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9083 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9084 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9085 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9086 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9087 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9088 \
9089 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9090 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9091 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9092 \
9093 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9094 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9095 \
9096 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9097 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9098 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9099 IEM_MC_END(); \
9100 break; \
9101 \
9102 case IEMMODE_64BIT: \
9103 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9106 IEMOP_HLP_DONE_DECODING(); \
9107 \
9108 IEM_MC_ARG(uint64_t, u64Src, 2); \
9109 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9110 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9111 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9112 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9113 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9114 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9115 \
9116 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9117 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9118 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9119 \
9120 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9121 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9122 \
9123 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9124 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9125 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9126 IEM_MC_END(); \
9127 break; \
9128 \
9129 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9130 } \
9131 } \
9132 } \
9133 (void)0
9134
9135/* Read-only version (bt). */
9136#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9138 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9139 \
9140 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9141 { \
9142 /* register destination. */ \
9143 switch (pVCpu->iem.s.enmEffOpSize) \
9144 { \
9145 case IEMMODE_16BIT: \
9146 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9148 \
9149 IEM_MC_ARG(uint16_t, u16Src, 2); \
9150 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9151 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9152 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9153 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9154 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9155 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9156 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9157 \
9158 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9159 IEM_MC_END(); \
9160 break; \
9161 \
9162 case IEMMODE_32BIT: \
9163 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9165 \
9166 IEM_MC_ARG(uint32_t, u32Src, 2); \
9167 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9168 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9169 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9170 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9171 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9172 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9173 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9174 \
9175 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9176 IEM_MC_END(); \
9177 break; \
9178 \
9179 case IEMMODE_64BIT: \
9180 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9182 \
9183 IEM_MC_ARG(uint64_t, u64Src, 2); \
9184 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9185 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9186 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9187 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9188 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9189 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9190 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9191 \
9192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9193 IEM_MC_END(); \
9194 break; \
9195 \
9196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9197 } \
9198 } \
9199 else \
9200 { \
9201 /* memory destination. */ \
9202 /** @todo test negative bit offsets! */ \
9203 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9204 { \
9205 switch (pVCpu->iem.s.enmEffOpSize) \
9206 { \
9207 case IEMMODE_16BIT: \
9208 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9211 IEMOP_HLP_DONE_DECODING(); \
9212 \
9213 IEM_MC_ARG(uint16_t, u16Src, 2); \
9214 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9215 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9216 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9217 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9218 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9219 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9220 \
9221 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9222 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9223 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9224 \
9225 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9226 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9227 \
9228 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9229 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9230 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9231 IEM_MC_END(); \
9232 break; \
9233 \
9234 case IEMMODE_32BIT: \
9235 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9238 IEMOP_HLP_DONE_DECODING(); \
9239 \
9240 IEM_MC_ARG(uint32_t, u32Src, 2); \
9241 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9242 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9243 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9244 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9245 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9246 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9247 \
9248 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9249 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9250 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9251 \
9252 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9253 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9254 \
9255 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9256 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9257 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9258 IEM_MC_END(); \
9259 break; \
9260 \
9261 case IEMMODE_64BIT: \
9262 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9265 IEMOP_HLP_DONE_DECODING(); \
9266 \
9267 IEM_MC_ARG(uint64_t, u64Src, 2); \
9268 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9269 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9270 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9271 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9272 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9273 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9274 \
9275 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9276 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9277 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9278 \
9279 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9280 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9281 \
9282 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9283 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9284 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9285 IEM_MC_END(); \
9286 break; \
9287 \
9288 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9289 } \
9290 } \
9291 else \
9292 { \
9293 IEMOP_HLP_DONE_DECODING(); \
9294 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9295 } \
9296 } \
9297 (void)0
9298
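/*
 * Note: for the memory forms in the three bodies above, the bit offset taken
 * from the source register is signed and may address a bit outside the
 * operand at the effective address.  The SAR/SHL pair turns the upper bits
 * into a sign-preserving, operand-sized byte displacement, while the AND
 * keeps the bit number inside one operand.  A minimal C sketch of the 32-bit
 * case; hypothetical helper name, and it assumes arithmetic right shift,
 * just like the SAR micro-op:
 *
 * @code
 *  #include <stdint.h>
 *
 *  static void myBtCalcMemLocation(uint64_t *puGCPtrEff, uint32_t *puBitNo, int32_t iBitOffset)
 *  {
 *      *puGCPtrEff += (int64_t)(iBitOffset >> 5) * 4; // signed dword index, scaled to bytes
 *      *puBitNo     = (uint32_t)iBitOffset & 0x1f;    // bit number within that dword
 *  }
 * @endcode
 */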
9299
9300/**
9301 * @opcode 0xa3
9302 * @oppfx n/a
9303 * @opflclass bitmap
9304 */
9305FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9306{
9307 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9308 IEMOP_HLP_MIN_386();
9309 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9310}
9311
9312
9313/**
9314 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9315 */
9316#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
9317 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9318 \
9319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9320 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9321 \
9322 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9323 { \
9324 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9325 \
9326 switch (pVCpu->iem.s.enmEffOpSize) \
9327 { \
9328 case IEMMODE_16BIT: \
9329 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9331 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9332 IEM_MC_ARG(uint16_t, u16Src, 1); \
9333 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9334 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9335 \
9336 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9337 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9338 IEM_MC_REF_EFLAGS(pEFlags); \
9339 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9340 \
9341 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9342 IEM_MC_END(); \
9343 break; \
9344 \
9345 case IEMMODE_32BIT: \
9346 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9348 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9349 IEM_MC_ARG(uint32_t, u32Src, 1); \
9350 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9351 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9352 \
9353 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9354 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9355 IEM_MC_REF_EFLAGS(pEFlags); \
9356 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9357 \
9358 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9359 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9360 IEM_MC_END(); \
9361 break; \
9362 \
9363 case IEMMODE_64BIT: \
9364 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9366 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9367 IEM_MC_ARG(uint64_t, u64Src, 1); \
9368 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9369 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9370 \
9371 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9372 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9373 IEM_MC_REF_EFLAGS(pEFlags); \
9374 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9375 \
9376 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9377 IEM_MC_END(); \
9378 break; \
9379 \
9380 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9381 } \
9382 } \
9383 else \
9384 { \
9385 switch (pVCpu->iem.s.enmEffOpSize) \
9386 { \
9387 case IEMMODE_16BIT: \
9388 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9391 \
9392 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9394 \
9395 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9396 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9397 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9398 \
9399 IEM_MC_ARG(uint16_t, u16Src, 1); \
9400 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9401 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9402 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9403 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9404 \
9405 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9406 IEM_MC_COMMIT_EFLAGS(EFlags); \
9407 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9408 IEM_MC_END(); \
9409 break; \
9410 \
9411 case IEMMODE_32BIT: \
9412 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9415 \
9416 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9418 \
9419 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9420 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9421 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9422 \
9423 IEM_MC_ARG(uint32_t, u32Src, 1); \
9424 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9425 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9427 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9428 \
9429 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9430 IEM_MC_COMMIT_EFLAGS(EFlags); \
9431 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9432 IEM_MC_END(); \
9433 break; \
9434 \
9435 case IEMMODE_64BIT: \
9436 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9439 \
9440 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9442 \
9443 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9444 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9445 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9446 \
9447 IEM_MC_ARG(uint64_t, u64Src, 1); \
9448 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9449 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9450 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9451 \
9452 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9453 \
9454 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9455 IEM_MC_COMMIT_EFLAGS(EFlags); \
9456 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9457 IEM_MC_END(); \
9458 break; \
9459 \
9460 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9461 } \
9462 } (void)0
9463
9464
9465/**
9466 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9467 */
9468#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9469 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9470 \
9471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9472 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9473 \
9474 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9475 { \
9476 switch (pVCpu->iem.s.enmEffOpSize) \
9477 { \
9478 case IEMMODE_16BIT: \
9479 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9481 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9482 IEM_MC_ARG(uint16_t, u16Src, 1); \
9483 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9484 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9485 \
9486 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9487 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9488 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9489 IEM_MC_REF_EFLAGS(pEFlags); \
9490 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9491 \
9492 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9493 IEM_MC_END(); \
9494 break; \
9495 \
9496 case IEMMODE_32BIT: \
9497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9499 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9500 IEM_MC_ARG(uint32_t, u32Src, 1); \
9501 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9502 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9503 \
9504 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9505 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9506 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9507 IEM_MC_REF_EFLAGS(pEFlags); \
9508 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9509 \
9510 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9511 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9512 IEM_MC_END(); \
9513 break; \
9514 \
9515 case IEMMODE_64BIT: \
9516 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9518 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9519 IEM_MC_ARG(uint64_t, u64Src, 1); \
9520 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9521 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9522 \
9523 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9524 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9525 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9526 IEM_MC_REF_EFLAGS(pEFlags); \
9527 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9528 \
9529 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9530 IEM_MC_END(); \
9531 break; \
9532 \
9533 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9534 } \
9535 } \
9536 else \
9537 { \
9538 switch (pVCpu->iem.s.enmEffOpSize) \
9539 { \
9540 case IEMMODE_16BIT: \
9541 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9542 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9543 IEM_MC_ARG(uint16_t, u16Src, 1); \
9544 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9546 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9547 \
9548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9550 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9551 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9552 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9553 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9554 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9555 \
9556 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9557 IEM_MC_COMMIT_EFLAGS(EFlags); \
9558 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9559 IEM_MC_END(); \
9560 break; \
9561 \
9562 case IEMMODE_32BIT: \
9563 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9564 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9565 IEM_MC_ARG(uint32_t, u32Src, 1); \
9566 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9568 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9569 \
9570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9572 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9573 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9574 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9575 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9576 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9577 \
9578 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9579 IEM_MC_COMMIT_EFLAGS(EFlags); \
9580 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9581 IEM_MC_END(); \
9582 break; \
9583 \
9584 case IEMMODE_64BIT: \
9585 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9586 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9587 IEM_MC_ARG(uint64_t, u64Src, 1); \
9588 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9590 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9591 \
9592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9594 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9595 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9596 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9597 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9598 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9599 \
9600 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9601 IEM_MC_COMMIT_EFLAGS(EFlags); \
9602 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9603 IEM_MC_END(); \
9604 break; \
9605 \
9606 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9607 } \
9608 } (void)0
9609
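/*
 * Note: both double precision shift bodies above leave the actual semantics
 * to the assembly/C workers: the destination is shifted and the vacated bits
 * are filled from the source operand.  A reference expression for the 32-bit
 * forms, assuming a count already masked to the 1..31 range (count 0 leaves
 * everything unchanged); illustration only, the tricky EFLAGS updates are
 * omitted:
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint32_t myShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift) // cShift in 1..31
 *  {
 *      return (uDst << cShift) | (uSrc >> (32 - cShift));
 *  }
 *
 *  static uint32_t myShrd32(uint32_t uDst, uint32_t uSrc, uint8_t cShift) // cShift in 1..31
 *  {
 *      return (uDst >> cShift) | (uSrc << (32 - cShift));
 *  }
 * @endcode
 */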
9610
9611/**
9612 * @opcode 0xa4
9613 * @opflclass shift_count
9614 */
9615FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9616{
9617 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9618 IEMOP_HLP_MIN_386();
9619 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9620}
9621
9622
9623/**
9624 * @opcode 0xa5
9625 * @opflclass shift_count
9626 */
9627FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9628{
9629 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9630 IEMOP_HLP_MIN_386();
9631 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9632}
9633
9634
9635/** Opcode 0x0f 0xa8. */
9636FNIEMOP_DEF(iemOp_push_gs)
9637{
9638 IEMOP_MNEMONIC(push_gs, "push gs");
9639 IEMOP_HLP_MIN_386();
9640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9641 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9642}
9643
9644
9645/** Opcode 0x0f 0xa9. */
9646FNIEMOP_DEF(iemOp_pop_gs)
9647{
9648 IEMOP_MNEMONIC(pop_gs, "pop gs");
9649 IEMOP_HLP_MIN_386();
9650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9652 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9653 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9654 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9655 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9656 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9657 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9658 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9659}
9660
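/*
 * Note: the second argument to IEM_MC_DEFER_TO_CIMPL_2_RET in the pop fs/gs
 * workers above is a bitmask telling the native recompiler which shadowed
 * guest registers the C implementation may dirty, so that only those get
 * flushed.  Building such a mask is plain bit arithmetic; a sketch with
 * hypothetical parameter names mirroring the kIemNativeGstReg_* usage above:
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint64_t myPopSregDirtyMask(unsigned iGprFirst, unsigned iSelFirst, unsigned iSReg, unsigned iRsp)
 *  {
 *      return ((uint64_t)1 << (iGprFirst + iRsp))   // the stack pointer changes
 *           | ((uint64_t)1 << (iSelFirst + iSReg)); // the selector (base/limit/attribs analogous)
 *  }
 * @endcode
 */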
9661
9662/** Opcode 0x0f 0xaa. */
9663FNIEMOP_DEF(iemOp_rsm)
9664{
9665 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9666 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9668 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9669 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9670 iemCImpl_rsm);
9671}
9672
9673
9674
9675/**
9676 * @opcode 0xab
9677 * @oppfx n/a
9678 * @opflclass bitmap
9679 */
9680FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9681{
9682 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9683 IEMOP_HLP_MIN_386();
9684 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9685 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9686}
9687
9688
9689/**
9690 * @opcode 0xac
9691 * @opflclass shift_count
9692 */
9693FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9694{
9695 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9696 IEMOP_HLP_MIN_386();
9697 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9698}
9699
9700
9701/**
9702 * @opcode 0xad
9703 * @opflclass shift_count
9704 */
9705FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9706{
9707 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9708 IEMOP_HLP_MIN_386();
9709 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9710}
9711
9712
9713/** Opcode 0x0f 0xae mem/0. */
9714FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9715{
9716 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9717 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9718 IEMOP_RAISE_INVALID_OPCODE_RET();
9719
9720 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9721 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9724 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9725 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9726 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9727 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9728 IEM_MC_END();
9729}
9730
9731
9732/** Opcode 0x0f 0xae mem/1. */
9733FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9734{
9735 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9736 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9737 IEMOP_RAISE_INVALID_OPCODE_RET();
9738
9739 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9740 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9743 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9744 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9745 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9746 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9747 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9748 IEM_MC_END();
9749}
9750
9751
9752/**
9753 * @opmaps grp15
9754 * @opcode !11/2
9755 * @oppfx none
9756 * @opcpuid sse
9757 * @opgroup og_sse_mxcsrsm
9758 * @opxcpttype 5
9759 * @optest op1=0 -> mxcsr=0
9760 * @optest op1=0x2083 -> mxcsr=0x2083
9761 * @optest op1=0xfffffffe -> value.xcpt=0xd
9762 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9763 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9764 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9765 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9766 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9767 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9768 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9769 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9770 */
9771FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9772{
9773 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9774 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9775 IEMOP_RAISE_INVALID_OPCODE_RET();
9776
9777 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9778 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9782 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9783 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9784 IEM_MC_END();
9785}
9786
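/*
 * Note: the actual LDMXCSR work happens in iemCImpl_ldmxcsr; the
 * architectural rule (also visible in the @optest cases above) is that
 * loading a value with any reserved MXCSR bit set raises #GP(0).  A hedged
 * C sketch of that check, with a hypothetical mask parameter standing in
 * for the CPU's MXCSR_MASK:
 *
 * @code
 *  #include <stdbool.h>
 *  #include <stdint.h>
 *
 *  static bool myIsMxCsrLoadValid(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
 *  {
 *      return (uNewMxCsr & ~fMxCsrMask) == 0; // any reserved bit set => #GP(0)
 *  }
 * @endcode
 */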
9787
9788/**
9789 * @opmaps grp15
9790 * @opcode !11/3
9791 * @oppfx none
9792 * @opcpuid sse
9793 * @opgroup og_sse_mxcsrsm
9794 * @opxcpttype 5
9795 * @optest mxcsr=0 -> op1=0
9796 * @optest mxcsr=0x2083 -> op1=0x2083
9797 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9798 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9799 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9800 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9801 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9802 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9803 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9804 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9805 */
9806FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9807{
9808 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9809 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9810 IEMOP_RAISE_INVALID_OPCODE_RET();
9811
9812 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9813 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9817 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9818 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9819 IEM_MC_END();
9820}
9821
9822
9823/**
9824 * @opmaps grp15
9825 * @opcode !11/4
9826 * @oppfx none
9827 * @opcpuid xsave
9828 * @opgroup og_system
9829 * @opxcpttype none
9830 */
9831FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9832{
9833 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9834 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9835 IEMOP_RAISE_INVALID_OPCODE_RET();
9836
9837 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9838 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9841 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9842 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9843 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9844 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9845 IEM_MC_END();
9846}
9847
9848
9849/**
9850 * @opmaps grp15
9851 * @opcode !11/5
9852 * @oppfx none
9853 * @opcpuid xsave
9854 * @opgroup og_system
9855 * @opxcpttype none
9856 */
9857FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9858{
9859 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9860 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9861 IEMOP_RAISE_INVALID_OPCODE_RET();
9862
9863 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9864 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9867 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9868 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9869 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9870 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9871 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9872 IEM_MC_END();
9873}
9874
9875/** Opcode 0x0f 0xae mem/6. */
9876FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9877
9878/**
9879 * @opmaps grp15
9880 * @opcode !11/7
9881 * @oppfx none
9882 * @opcpuid clfsh
9883 * @opgroup og_cachectl
9884 * @optest op1=1 ->
9885 */
9886FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9887{
9888 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9889 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9890 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9891
9892 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9893 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9896 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9897 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9898 IEM_MC_END();
9899}
9900
9901/**
9902 * @opmaps grp15
9903 * @opcode !11/7
9904 * @oppfx 0x66
9905 * @opcpuid clflushopt
9906 * @opgroup og_cachectl
9907 * @optest op1=1 ->
9908 */
9909FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9910{
9911 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9912 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9913 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9914
9915 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9916 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9919 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9920 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9921 IEM_MC_END();
9922}
9923
9924
9925/** Opcode 0x0f 0xae 11b/5. */
9926FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9927{
9928 RT_NOREF_PV(bRm);
9929 IEMOP_MNEMONIC(lfence, "lfence");
9930 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9932#ifdef RT_ARCH_ARM64
9933 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9934#else
9935 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9936 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9937 else
9938 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9939#endif
9940 IEM_MC_ADVANCE_RIP_AND_FINISH();
9941 IEM_MC_END();
9942}
9943
9944
9945/** Opcode 0x0f 0xae 11b/6. */
9946FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9947{
9948 RT_NOREF_PV(bRm);
9949 IEMOP_MNEMONIC(mfence, "mfence");
9950 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9952#ifdef RT_ARCH_ARM64
9953 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9954#else
9955 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9956 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9957 else
9958 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9959#endif
9960 IEM_MC_ADVANCE_RIP_AND_FINISH();
9961 IEM_MC_END();
9962}
9963
9964
9965/** Opcode 0x0f 0xae 11b/7. */
9966FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9967{
9968 RT_NOREF_PV(bRm);
9969 IEMOP_MNEMONIC(sfence, "sfence");
9970 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9972#ifdef RT_ARCH_ARM64
9973 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9974#else
9975 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9976 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9977 else
9978 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9979#endif
9980 IEM_MC_ADVANCE_RIP_AND_FINISH();
9981 IEM_MC_END();
9982}
9983
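/*
 * Note: the three fence workers above fall back to iemAImpl_alt_mem_fence
 * when the x86 host itself lacks SSE2 (the gate is host SSE2 for all three,
 * even though SFENCE already arrived with SSE); a LOCKed read-modify-write
 * is the classic way to get full ordering on such CPUs.  A standalone
 * sketch of that idea, assuming GCC/Clang inline assembly on a 32-bit x86
 * target; illustration only, not the actual worker:
 *
 * @code
 *  static void myAltMemFence(void)
 *  {
 *      __asm__ __volatile__("lock; addl $0, (%%esp)" ::: "memory", "cc");
 *  }
 * @endcode
 */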
9984
9985/** Opcode 0xf3 0x0f 0xae 11b/0. */
9986FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9987{
9988 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9989 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9990 {
9991 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9993 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9994 IEM_MC_LOCAL(uint64_t, u64Dst);
9995 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9996 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9997 IEM_MC_ADVANCE_RIP_AND_FINISH();
9998 IEM_MC_END();
9999 }
10000 else
10001 {
10002 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10004 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10005 IEM_MC_LOCAL(uint32_t, u32Dst);
10006 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10007 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10008 IEM_MC_ADVANCE_RIP_AND_FINISH();
10009 IEM_MC_END();
10010 }
10011}
10012
10013
10014/** Opcode 0xf3 0x0f 0xae 11b/1. */
10015FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10016{
10017 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10018 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10019 {
10020 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10022 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10023 IEM_MC_LOCAL(uint64_t, u64Dst);
10024 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10025 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10026 IEM_MC_ADVANCE_RIP_AND_FINISH();
10027 IEM_MC_END();
10028 }
10029 else
10030 {
10031 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10033 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10034 IEM_MC_LOCAL(uint32_t, u32Dst);
10035 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10036 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10037 IEM_MC_ADVANCE_RIP_AND_FINISH();
10038 IEM_MC_END();
10039 }
10040}
10041
10042
10043/** Opcode 0xf3 0x0f 0xae 11b/2. */
10044FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10045{
10046 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10047 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10048 {
10049 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10051 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10052 IEM_MC_LOCAL(uint64_t, u64Dst);
10053 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10054 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10055 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10056 IEM_MC_ADVANCE_RIP_AND_FINISH();
10057 IEM_MC_END();
10058 }
10059 else
10060 {
10061 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10063 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10064 IEM_MC_LOCAL(uint32_t, u32Dst);
10065 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10066 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10067 IEM_MC_ADVANCE_RIP_AND_FINISH();
10068 IEM_MC_END();
10069 }
10070}
10071
10072
10073/** Opcode 0xf3 0x0f 0xae 11b/3. */
10074FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10075{
10076 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10077 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10078 {
10079 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10081 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10082 IEM_MC_LOCAL(uint64_t, u64Dst);
10083 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10084 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10085 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10086 IEM_MC_ADVANCE_RIP_AND_FINISH();
10087 IEM_MC_END();
10088 }
10089 else
10090 {
10091 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10093 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10094 IEM_MC_LOCAL(uint32_t, u32Dst);
10095 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10096 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10097 IEM_MC_ADVANCE_RIP_AND_FINISH();
10098 IEM_MC_END();
10099 }
10100}
10101
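/*
 * Note: all four RD/WRFSBASE and RD/WRGSBASE workers above share the same
 * gating via IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT: the instructions #UD outside
 * 64-bit mode and when CR4.FSGSBASE is clear.  Sketched as a plain C
 * predicate with hypothetical parameter names (illustration only):
 *
 * @code
 *  #include <stdbool.h>
 *
 *  static bool myFsGsBaseRaisesUd(bool fIn64BitMode, bool fCr4FsGsBase)
 *  {
 *      return !fIn64BitMode || !fCr4FsGsBase; // true: raise #UD
 *  }
 * @endcode
 */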
10102
10103/**
10104 * Group 15 jump table for register variant.
10105 */
10106IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10107{ /* pfx: none, 066h, 0f3h, 0f2h */
10108 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10109 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10110 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10111 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10112 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10113 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10114 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10115 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10116};
10117AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10118
10119
10120/**
10121 * Group 15 jump table for memory variant.
10122 */
10123IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10124{ /* pfx: none, 066h, 0f3h, 0f2h */
10125 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10126 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10127 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10128 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10130 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10131 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10132 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133};
10134AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10135
10136
10137/** Opcode 0x0f 0xae. */
10138FNIEMOP_DEF(iemOp_Grp15)
10139{
10140 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
10141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10142 if (IEM_IS_MODRM_REG_MODE(bRm))
10143 /* register, register */
10144 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10145 + pVCpu->iem.s.idxPrefix], bRm);
10146 /* memory, register */
10147 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10148 + pVCpu->iem.s.idxPrefix], bRm);
10149}
10150
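/*
 * Note: the two group 15 tables above are laid out as eight rows of four
 * columns, indexed reg*4 + prefix with the none/066h/0f3h/0f2h column order
 * noted in the table headers.  The lookup math, restated as a tiny
 * self-contained C helper (hypothetical name, decoder context omitted):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static unsigned myGrp15TableIndex(uint8_t bRm, unsigned idxPrefix) // 0=none, 1=66h, 2=f3h, 3=f2h
 *  {
 *      unsigned const iReg = (bRm >> 3) & 7; // ModR/M reg field picks the row
 *      return iReg * 4 + idxPrefix;          // mandatory prefix picks the column
 *  }
 * @endcode
 */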
10151
10152/**
10153 * @opcode 0xaf
10154 * @opflclass multiply
10155 */
10156FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10157{
10158 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10159 IEMOP_HLP_MIN_386();
10160 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10161 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10163 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10164}
10165
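/*
 * Note: the two-operand IMUL truncates the signed product to the operand
 * size and sets CF and OF when the full product no longer fits; the other
 * arithmetic flags are undefined, matching the verification mask above.  A
 * 32-bit C sketch of that rule (illustration only):
 *
 * @code
 *  #include <stdbool.h>
 *  #include <stdint.h>
 *
 *  static int32_t myImul32(int32_t iDst, int32_t iSrc, bool *pfCfOf)
 *  {
 *      int64_t const iFull = (int64_t)iDst * iSrc;
 *      *pfCfOf = iFull != (int32_t)iFull; // CF=OF=1 on signed overflow of 32 bits
 *      return (int32_t)iFull;
 *  }
 * @endcode
 */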
10166
10167/**
10168 * @opcode 0xb0
10169 * @opflclass arithmetic
10170 */
10171FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10172{
10173 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10174 IEMOP_HLP_MIN_486();
10175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10176
10177 if (IEM_IS_MODRM_REG_MODE(bRm))
10178 {
10179 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10181 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10182 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10183 IEM_MC_ARG(uint8_t, u8Src, 2);
10184 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10185
10186 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10187 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10188 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10189 IEM_MC_REF_EFLAGS(pEFlags);
10190 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10191
10192 IEM_MC_ADVANCE_RIP_AND_FINISH();
10193 IEM_MC_END();
10194 }
10195 else
10196 {
10197#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10198 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10201 IEMOP_HLP_DONE_DECODING(); \
10202 \
10203 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10204 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10205 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10206 \
10207 IEM_MC_ARG(uint8_t, u8Src, 2); \
10208 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10209 \
10210 IEM_MC_LOCAL(uint8_t, u8Al); \
10211 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10212 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10213 \
10214 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10215 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10216 \
10217 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10218 IEM_MC_COMMIT_EFLAGS(EFlags); \
10219 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10220 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10221 IEM_MC_END()
10222
10223 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10224 {
10225 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10226 }
10227 else
10228 {
10229 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10230 }
10231 }
10232}
10233
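/*
 * Note: CMPXCHG compares the accumulator with the destination; on a match
 * it stores the source into the destination and sets ZF, otherwise it loads
 * the current destination value into the accumulator and clears ZF.  The
 * byte-sized core, modelled in C with the CMP-style updates of the other
 * flags omitted (illustration only):
 *
 * @code
 *  #include <stdbool.h>
 *  #include <stdint.h>
 *
 *  static bool myCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src) // returns ZF
 *  {
 *      if (*pu8Dst == *pu8Al)
 *      {
 *          *pu8Dst = u8Src; // ZF=1: exchange performed
 *          return true;
 *      }
 *      *pu8Al = *pu8Dst;    // ZF=0: accumulator receives the memory value
 *      return false;
 *  }
 * @endcode
 */
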
10234/**
10235 * @opcode 0xb1
10236 * @opflclass arithmetic
10237 */
10238FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10239{
10240 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10241 IEMOP_HLP_MIN_486();
10242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10243
10244 if (IEM_IS_MODRM_REG_MODE(bRm))
10245 {
10246 switch (pVCpu->iem.s.enmEffOpSize)
10247 {
10248 case IEMMODE_16BIT:
10249 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10252 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10253 IEM_MC_ARG(uint16_t, u16Src, 2);
10254 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10255
10256 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10257 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10258 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10259 IEM_MC_REF_EFLAGS(pEFlags);
10260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10261
10262 IEM_MC_ADVANCE_RIP_AND_FINISH();
10263 IEM_MC_END();
10264 break;
10265
10266 case IEMMODE_32BIT:
10267 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10269 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10270 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10271 IEM_MC_ARG(uint32_t, u32Src, 2);
10272 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10273
10274 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10275 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10276 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10277 IEM_MC_REF_EFLAGS(pEFlags);
10278 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10279
10280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10281 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10282 } IEM_MC_ELSE() {
10283 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10284 } IEM_MC_ENDIF();
10285
10286 IEM_MC_ADVANCE_RIP_AND_FINISH();
10287 IEM_MC_END();
10288 break;
10289
10290 case IEMMODE_64BIT:
10291 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
                IEM_MC_ARG(uint64_t, u64Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                \
                IEM_MC_LOCAL(uint16_t, u16Ax); \
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                \
                IEM_MC_LOCAL(uint32_t, u32Eax); \
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                \
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                \
                IEM_MC_LOCAL(uint64_t, u64Rax); \
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                \
                IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)

        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64, RW);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked, ATOMIC);
        }
    }
}
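
/*
 * Note: roughly, the cmpxchg workers invoked above implement the following
 * (an illustrative sketch only; CmpXchgU64Sketch and its parameter names are
 * invented for this comment and do not exist in the code base):
 *
 *      static uint32_t CmpXchgU64Sketch(uint64_t *puDst, uint64_t *puRax,
 *                                       uint64_t uSrc, uint32_t fEFlags)
 *      {
 *          if (*puRax == *puDst)
 *          {
 *              fEFlags |= X86_EFL_ZF;      // equal: write the source operand
 *              *puDst   = uSrc;
 *          }
 *          else
 *          {
 *              fEFlags &= ~X86_EFL_ZF;     // not equal: load the accumulator
 *              *puRax   = *puDst;
 *          }
 *          return fEFlags;                 // CF/PF/AF/SF/OF as for a CMP of
 *      }                                   // accumulator and destination.
 */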


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/**
 * @opcode 0xb3
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
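
/*
 * Note: as a concrete example of the zero extending above, "movzx eax, bl"
 * (0f b6 c3) behaves roughly like this C sketch (variable names invented for
 * this comment):
 *
 *      uint8_t const bl  = ...;
 *      uint32_t      eax = (uint32_t)bl;   // bits 31:8 become zero; the
 *                                          // 32-bit GPR write then clears
 *                                          // bits 63:32 of rax as usual.
 */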


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here;
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */

    /** @todo There should be no difference in the behaviour whether REX.W is
     *        present or not... */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);


/**
 * @opcode 0xb8
 * @oppfx 0xf3
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear cf,pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
}
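
/*
 * Note: a minimal C sketch of what the popcnt fallback workers compute
 * (illustrative only; PopCntU32Sketch is not a real function in this code
 * base):
 *
 *      static unsigned PopCntU32Sketch(uint32_t uSrc)
 *      {
 *          unsigned cBits = 0;
 *          while (uSrc)
 *          {
 *              uSrc &= uSrc - 1;   // clears the lowest set bit
 *              cBits++;
 *          }
 *          return cBits;           // ZF is set when the input was zero; the
 *      }                           // other status flags are cleared (see
 *                                  // the @opflclear list above).
 */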


/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD stops decoding at the 0xb9 opcode byte, whereas Intel also consumes
     * the modr/m byte. See bs3-cpu-decoder-1.c32. So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


/**
 * Body for group 8 bit instruction.
 */
#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
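
/*
 * Note: with an immediate bit offset the Ev,Ib forms above mask the offset by
 * the operand width, so the access never leaves the addressed unit.  A rough
 * sketch of the 16-bit bts case (BtsU16Sketch and its names are invented for
 * this comment; the & 0x0f mirrors the masking done by the decoder above):
 *
 *      static uint32_t BtsU16Sketch(uint32_t fEFlags, uint16_t *puDst, uint8_t bImm)
 *      {
 *          uint16_t const fMask = (uint16_t)1 << (bImm & 0x0f);
 *          if (*puDst & fMask)
 *              fEFlags |= X86_EFL_CF;      // CF = old value of the bit
 *          else
 *              fEFlags &= ~X86_EFL_CF;
 *          *puDst |= fMask;                // bts: set the bit
 *          return fEFlags;
 *      }
 */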

/* Read-only version (bt) */
#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0


/**
 * @opmaps grp8
 * @opcode /4
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}


/**
 * @opmaps grp8
 * @opcode /5
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}


/**
 * @opmaps grp8
 * @opcode /6
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}


/**
 * @opmaps grp8
 * @opcode /7
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
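
/*
 * Note: as a decoding example, the byte sequence 0f ba e0 05 goes through the
 * dispatcher above with modrm=0xe0 (mod=3, reg=4, rm=0), i.e. "bt eax, 5".
 */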


/**
 * @opcode 0xbb
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}


/**
 * Body for BSF and BSR instructions.
 *
 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
 * the destination register, which means that for 32-bit operations the high
 * bits must be left alone.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
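
/*
 * Note: a rough sketch of the bsf semantics implemented above (illustrative
 * only; BsfU32Sketch is not a function in this code base).  The ZF-guarded
 * IEM_MC_CLEAR_HIGH_GREG_U64 above mirrors the "destination untouched when
 * the source is zero" behaviour:
 *
 *      static uint32_t BsfU32Sketch(uint32_t fEFlags, uint32_t *puDst, uint32_t uSrc)
 *      {
 *          if (uSrc)
 *          {
 *              fEFlags &= ~X86_EFL_ZF;
 *              unsigned iBit = 0;
 *              while (!(uSrc & 1))     // scan for the lowest set bit
 *              {
 *                  uSrc >>= 1;
 *                  iBit++;
 *              }
 *              *puDst = iBit;
 *          }
 *          else
 *              fEFlags |= X86_EFL_ZF;  // source zero: ZF=1, no destination write
 *          return fEFlags;             // (the other flags are undefined)
 *      }
 */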


/**
 * @opcode 0xbc
 * @oppfx !0xf3
 * @opfltest cf,pf,af,sf,of
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,pf,af,sf,of
 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
 *       document them as inputs. Sigh.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
    IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
}


/**
 * @opcode 0xbc
 * @oppfx 0xf3
 * @opfltest pf,af,sf,of
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,af,sf,of
 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
 *       document them as inputs. Sigh.
 */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
}
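
/*
 * Note: unlike bsf, tzcnt also writes the destination for a zero source; the
 * result is then the operand width and CF is set, roughly (pseudo-C, not a
 * function in this code base):
 *
 *      uResult = uSrc ? CountTrailingZeros(uSrc) : cOpBits;  // cOpBits = 16/32/64
 *      fCF     = (uSrc == 0);
 *      fZF     = (uResult == 0);
 */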


/**
 * @opcode 0xbd
 * @oppfx !0xf3
 * @opfltest cf,pf,af,sf,of
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,pf,af,sf,of
 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
 *       document them as inputs. Sigh.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
    IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
}


/**
 * @opcode 0xbd
 * @oppfx 0xf3
 * @opfltest pf,af,sf,of
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,af,sf,of
 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
 *       document them as inputs. Sigh.
 */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
}



/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
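
/*
 * Note: sign extension example for the above: with bl = 0x80, "movsx eax, bl"
 * (0f be c3) loads 0xffffff80 into eax, i.e. in C terms roughly
 * (uint32_t)(int32_t)(int8_t)0x80.
 */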


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here;
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0xc0
 * @opflclass arithmetic
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_LOCAL(uint8_t, u8RegCopy); \
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8, RW);
        }
        else
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked, ATOMIC);
        }
    }
}
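
/*
 * Note: roughly, the xadd workers above implement an exchange-and-add (an
 * illustrative sketch only; uTmp is invented for this comment):
 *
 *      uint8_t const uTmp = *pu8Dst + *pu8Reg;  // flags set as for ADD
 *      *pu8Reg = *pu8Dst;                       // old destination -> register
 *      *pu8Dst = uTmp;                          // sum -> destination
 */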


/**
 * @opcode 0xc1
 * @opflclass arithmetic
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint16_t, u16RegCopy); \
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint32_t, u32RegCopy); \
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_LOCAL(uint64_t, u64RegCopy); \
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)

        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64, RW);
        }
        else
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked, ATOMIC);
        }
    }
}


/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
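
/*
 * Note: the imm8 handed to the cmpps/cmppd/cmpss/cmpsd workers selects the
 * comparison predicate; for these non-VEX encodings only values 0..7 are
 * defined: 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD.  Each lane
 * produces all-ones when the predicate holds and all-zeroes otherwise, e.g.
 * for "cmpps xmm0, xmm1, 1" per 32-bit lane (pseudo-C for this comment):
 *
 *      auDst[i] = auDst[i] < auSrc[i] ? UINT32_C(0xffffffff) : 0;
 */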
11843
11844
11845/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11846FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11847{
11848 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11849
11850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11851 if (IEM_IS_MODRM_REG_MODE(bRm))
11852 {
11853 /*
11854 * XMM, XMM.
11855 */
11856 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11857 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11859 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11860 IEM_MC_LOCAL(X86XMMREG, Dst);
11861 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11862 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11863 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11865 IEM_MC_PREPARE_SSE_USAGE();
11866 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11867 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11868 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11869
11870 IEM_MC_ADVANCE_RIP_AND_FINISH();
11871 IEM_MC_END();
11872 }
11873 else
11874 {
11875 /*
11876 * XMM, [mem128].
11877 */
11878 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11879 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11880 IEM_MC_LOCAL(X86XMMREG, Dst);
11881 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11882 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11884
11885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11886 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11887 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11889 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11890 IEM_MC_PREPARE_SSE_USAGE();
11891
11892 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11893 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11894 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11895
11896 IEM_MC_ADVANCE_RIP_AND_FINISH();
11897 IEM_MC_END();
11898 }
11899}
11900
11901
11902/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11903FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11904{
11905 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11906
11907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11908 if (IEM_IS_MODRM_REG_MODE(bRm))
11909 {
11910 /*
11911 * XMM32, XMM32.
11912 */
11913 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11914 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11916 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11917 IEM_MC_LOCAL(X86XMMREG, Dst);
11918 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11919 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11920 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11921 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11922 IEM_MC_PREPARE_SSE_USAGE();
11923 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11925 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11926
11927 IEM_MC_ADVANCE_RIP_AND_FINISH();
11928 IEM_MC_END();
11929 }
11930 else
11931 {
11932 /*
11933 * XMM32, [mem32].
11934 */
11935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11936 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11937 IEM_MC_LOCAL(X86XMMREG, Dst);
11938 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11939 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11941
11942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11943 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11944 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11947 IEM_MC_PREPARE_SSE_USAGE();
11948
11949 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11950 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11951 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11952 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11953
11954 IEM_MC_ADVANCE_RIP_AND_FINISH();
11955 IEM_MC_END();
11956 }
11957}
11958
11959
11960/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11961FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11962{
11963 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11964
11965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11966 if (IEM_IS_MODRM_REG_MODE(bRm))
11967 {
11968 /*
11969 * XMM64, XMM64.
11970 */
11971 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11972 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11974 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11975 IEM_MC_LOCAL(X86XMMREG, Dst);
11976 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11977 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11978 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11979 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11980 IEM_MC_PREPARE_SSE_USAGE();
11981 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11982 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11983 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11984
11985 IEM_MC_ADVANCE_RIP_AND_FINISH();
11986 IEM_MC_END();
11987 }
11988 else
11989 {
11990 /*
11991 * XMM64, [mem64].
11992 */
11993 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11994 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11995 IEM_MC_LOCAL(X86XMMREG, Dst);
11996 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11997 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11999
12000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12001 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12002 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12004 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12005 IEM_MC_PREPARE_SSE_USAGE();
12006
12007 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12008 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12009 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12010 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12011
12012 IEM_MC_ADVANCE_RIP_AND_FINISH();
12013 IEM_MC_END();
12014 }
12015}
12016
12017
12018/** Opcode 0x0f 0xc3. */
12019FNIEMOP_DEF(iemOp_movnti_My_Gy)
12020{
12021 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12022
12023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12024
12025 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
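    /* The non-temporal hint only affects caching behaviour on real hardware;
       for emulation purposes a plain store is sufficient. */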
12026 if (IEM_IS_MODRM_MEM_MODE(bRm))
12027 {
12028 switch (pVCpu->iem.s.enmEffOpSize)
12029 {
12030 case IEMMODE_32BIT:
12031 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
12032 IEM_MC_LOCAL(uint32_t, u32Value);
12033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12034
12035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12037
12038 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12039 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12040 IEM_MC_ADVANCE_RIP_AND_FINISH();
12041 IEM_MC_END();
12042 break;
12043
12044 case IEMMODE_64BIT:
12045 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12046 IEM_MC_LOCAL(uint64_t, u64Value);
12047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12048
12049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12051
12052 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12053 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12054 IEM_MC_ADVANCE_RIP_AND_FINISH();
12055 IEM_MC_END();
12056 break;
12057
12058 case IEMMODE_16BIT:
12059 /** @todo check this form. */
12060 IEMOP_RAISE_INVALID_OPCODE_RET();
12061
12062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12063 }
12064 }
12065 else
12066 IEMOP_RAISE_INVALID_OPCODE_RET();
12067}
12068
12069
12070/* Opcode 0x66 0x0f 0xc3 - invalid */
12071/* Opcode 0xf3 0x0f 0xc3 - invalid */
12072/* Opcode 0xf2 0x0f 0xc3 - invalid */
12073
12074
12075/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12076FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12077{
12078 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12080 if (IEM_IS_MODRM_REG_MODE(bRm))
12081 {
12082 /*
12083 * Register, register.
12084 */
12085 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12086 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12087 IEM_MC_LOCAL(uint16_t, uValue);
12088
12089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12090 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12091 IEM_MC_PREPARE_FPU_USAGE();
12092 IEM_MC_FPU_TO_MMX_MODE();
12093
12094 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
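        /* Only imm8[1:0] selects the lane (an MMX register has four word
           lanes); the remaining immediate bits are ignored.  The SSE form
           below masks with 7 for its eight lanes accordingly. */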
12095 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12096
12097 IEM_MC_ADVANCE_RIP_AND_FINISH();
12098 IEM_MC_END();
12099 }
12100 else
12101 {
12102 /*
12103 * Register, memory.
12104 */
12105 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12107 IEM_MC_LOCAL(uint16_t, uValue);
12108
12109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12110 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12112 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12113 IEM_MC_PREPARE_FPU_USAGE();
12114
12115 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12116 IEM_MC_FPU_TO_MMX_MODE();
12117 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12118
12119 IEM_MC_ADVANCE_RIP_AND_FINISH();
12120 IEM_MC_END();
12121 }
12122}
12123
12124
12125/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12126FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12127{
12128 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12130 if (IEM_IS_MODRM_REG_MODE(bRm))
12131 {
12132 /*
12133 * Register, register.
12134 */
12135 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12136 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12138
12139 IEM_MC_LOCAL(uint16_t, uValue);
12140 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12141 IEM_MC_PREPARE_SSE_USAGE();
12142
12143 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12144 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12145 IEM_MC_ADVANCE_RIP_AND_FINISH();
12146 IEM_MC_END();
12147 }
12148 else
12149 {
12150 /*
12151 * Register, memory.
12152 */
12153 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12155 IEM_MC_LOCAL(uint16_t, uValue);
12156
12157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12158 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12160 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12161 IEM_MC_PREPARE_SSE_USAGE();
12162
12163 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12164 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12165 IEM_MC_ADVANCE_RIP_AND_FINISH();
12166 IEM_MC_END();
12167 }
12168}
12169
12170
12171/* Opcode 0xf3 0x0f 0xc4 - invalid */
12172/* Opcode 0xf2 0x0f 0xc4 - invalid */
12173
12174
12175/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12176FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12177{
12178 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12180 if (IEM_IS_MODRM_REG_MODE(bRm))
12181 {
12182 /*
12183 * Greg32, MMX, imm8.
12184 */
12185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12186 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12188 IEM_MC_LOCAL(uint16_t, uValue);
12189 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12190 IEM_MC_PREPARE_FPU_USAGE();
12191 IEM_MC_FPU_TO_MMX_MODE();
12192 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12193 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12194 IEM_MC_ADVANCE_RIP_AND_FINISH();
12195 IEM_MC_END();
12196 }
12197 /* No memory operand. */
12198 else
12199 IEMOP_RAISE_INVALID_OPCODE_RET();
12200}
12201
12202
12203/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12204FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12205{
12206 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12208 if (IEM_IS_MODRM_REG_MODE(bRm))
12209 {
12210 /*
12211 * Greg32, XMM, imm8.
12212 */
12213 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12214 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12216 IEM_MC_LOCAL(uint16_t, uValue);
12217 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12218 IEM_MC_PREPARE_SSE_USAGE();
12219 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12220 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12221 IEM_MC_ADVANCE_RIP_AND_FINISH();
12222 IEM_MC_END();
12223 }
12224 /* No memory operand. */
12225 else
12226 IEMOP_RAISE_INVALID_OPCODE_RET();
12227}
12228
12229
12230/* Opcode 0xf3 0x0f 0xc5 - invalid */
12231/* Opcode 0xf2 0x0f 0xc5 - invalid */
12232
12233
12234/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12235FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12236{
12237 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
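    /* imm8 picks two dwords from the destination (bits 1:0 and 3:2) and two
       from the source (bits 5:4 and 7:6), so the destination operand doubles
       as the first shuffle source. */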
12238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12239 if (IEM_IS_MODRM_REG_MODE(bRm))
12240 {
12241 /*
12242 * XMM, XMM, imm8.
12243 */
12244 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12245 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12247 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12248 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12249 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12250 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12251 IEM_MC_PREPARE_SSE_USAGE();
12252 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12253 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12254 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12255 IEM_MC_ADVANCE_RIP_AND_FINISH();
12256 IEM_MC_END();
12257 }
12258 else
12259 {
12260 /*
12261 * XMM, [mem128], imm8.
12262 */
12263 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12264 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12265 IEM_MC_LOCAL(RTUINT128U, uSrc);
12266 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12268
12269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12270 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12271 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12273 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12274 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12275
12276 IEM_MC_PREPARE_SSE_USAGE();
12277 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12278 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12279
12280 IEM_MC_ADVANCE_RIP_AND_FINISH();
12281 IEM_MC_END();
12282 }
12283}
12284
12285
12286/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12287FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12288{
12289 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12291 if (IEM_IS_MODRM_REG_MODE(bRm))
12292 {
12293 /*
12294 * XMM, XMM, imm8.
12295 */
12296 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12297 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12299 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12300 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12301 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12302 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12303 IEM_MC_PREPARE_SSE_USAGE();
12304 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12305 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12306 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12307 IEM_MC_ADVANCE_RIP_AND_FINISH();
12308 IEM_MC_END();
12309 }
12310 else
12311 {
12312 /*
12313 * XMM, [mem128], imm8.
12314 */
12315 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12316 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12317 IEM_MC_LOCAL(RTUINT128U, uSrc);
12318 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12320
12321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12322 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12323 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12325 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12326 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12327
12328 IEM_MC_PREPARE_SSE_USAGE();
12329 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12330 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12331
12332 IEM_MC_ADVANCE_RIP_AND_FINISH();
12333 IEM_MC_END();
12334 }
12335}
12336
12337
12338/* Opcode 0xf3 0x0f 0xc6 - invalid */
12339/* Opcode 0xf2 0x0f 0xc6 - invalid */
12340
12341
12342/**
12343 * @opmaps grp9
12344 * @opcode /1
12345 * @opcodesub !11 mr/reg rex.w=0
12346 * @oppfx n/a
12347 * @opflmodify zf
12348 */
12349FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12350{
12351 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
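    /* cmpxchg8b compares EDX:EAX with the qword at [mem]; if equal it sets ZF
       and stores ECX:EBX to [mem], otherwise it clears ZF and loads the memory
       value into EDX:EAX; see the IEM_MC_IF_EFL_BIT_NOT_SET tail below. */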
12352#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12356 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12357 \
12358 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12359 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12360 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12361 \
12362 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12363 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12364 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12365 \
12366 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12367 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12368 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12369 \
12370 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12371 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12372 \
12373 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12374 IEM_MC_COMMIT_EFLAGS(EFlags); \
12375 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12376 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12377 } IEM_MC_ENDIF(); \
12378 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12379 \
12380 IEM_MC_END()
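    /* A LOCK prefix (unless we are told to disregard it) selects the atomic
       worker; otherwise the plain read-modify-write variant suffices. */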
12381 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12382 {
12383 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12384 }
12385 else
12386 {
12387 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12388 }
12389}
12390
12391
12392/**
12393 * @opmaps grp9
12394 * @opcode /1
12395 * @opcodesub !11 mr/reg rex.w=1
12396 * @oppfx n/a
12397 * @opflmodify zf
12398 */
12399FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12400{
12401 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12402 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12403 {
12404 /*
12405 * This is hairy, very hairy macro fun. We're walking a fine line
12406 * here to make the code parsable by IEMAllInstPython.py and fit into
12407 * the patterns IEMAllThrdPython.py requires for the code morphing.
12408 */
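    /* Note: cmpxchg16b requires a 16-byte aligned memory operand and raises
       #GP(0) otherwise, which is what the
       IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED check in the head macro enforces. */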
12409#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12410 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12413 IEMOP_HLP_DONE_DECODING(); \
12414 \
12415 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12416 bUnmapInfoStmt; \
12417 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12418 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12419 \
12420 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12421 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12422 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12423 \
12424 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12425 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12426 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12427 \
12428 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12429
12430#define BODY_CMPXCHG16B_TAIL(a_Type) \
12431 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12432 IEM_MC_COMMIT_EFLAGS(EFlags); \
12433 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12434 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12435 } IEM_MC_ENDIF(); \
12436 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12437 IEM_MC_END()
12438
12439#ifdef RT_ARCH_AMD64
12440 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12441 {
12442 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12443 {
12444 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12445 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12446 BODY_CMPXCHG16B_TAIL(RW);
12447 }
12448 else
12449 {
12450 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12451 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12452 BODY_CMPXCHG16B_TAIL(ATOMIC);
12453 }
12454 }
12455 else
12456 { /* (see comments in #else case below) */
12457 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12458 {
12459 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12460 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12461 BODY_CMPXCHG16B_TAIL(RW);
12462 }
12463 else
12464 {
12465 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12466 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12467 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12468 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12469 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12470 pEFlags, bUnmapInfo);
12471 IEM_MC_END();
12472 }
12473 }
12474
12475#elif defined(RT_ARCH_ARM64)
12476 /** @todo may require fallback for unaligned accesses... */
12477 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12478 {
12479 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12480 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12481 BODY_CMPXCHG16B_TAIL(RW);
12482 }
12483 else
12484 {
12485 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12486 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12487 BODY_CMPXCHG16B_TAIL(ATOMIC);
12488 }
12489
12490#else
12491    /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
12492       accesses and is not at all atomic, which works fine in a UNI CPU guest
12493       configuration (ignoring DMA). If guest SMP is active we have no choice
12494       but to use a rendezvous callback here. Sigh. */
12495 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12496 {
12497 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12498 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12499 BODY_CMPXCHG16B_TAIL(RW);
12500 }
12501 else
12502 {
12503 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12504        IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12505                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12506                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12507                            iemCImpl_cmpxchg16b_fallback_rendezvous,
12508                            pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags, bUnmapInfo);
12509 IEM_MC_END();
12510 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12511 }
12512#endif
12513
12514#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12515 }
12516 Log(("cmpxchg16b -> #UD\n"));
12517 IEMOP_RAISE_INVALID_OPCODE_RET();
12518}
12519
12520FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12521{
12522 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12523 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12524 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12525}
12526
12527
12528/** Opcode 0x0f 0xc7 11/6. */
12529FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12530{
12531 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12532 IEMOP_RAISE_INVALID_OPCODE_RET();
12533
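    /* rdrand sets CF=1 when a random value was delivered and CF=0 otherwise,
       clearing OF, SF, ZF, AF and PF in both cases; hence IEM_CIMPL_F_RFLAGS. */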
12534 if (IEM_IS_MODRM_REG_MODE(bRm))
12535 {
12536 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12538 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12539 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12540 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12541 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12542 iemCImpl_rdrand, iReg, enmEffOpSize);
12543 IEM_MC_END();
12544 }
12545 /* Register only. */
12546 else
12547 IEMOP_RAISE_INVALID_OPCODE_RET();
12548}
12549
12550/** Opcode 0x0f 0xc7 !11/6. */
12551#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12552FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12553{
12554 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12555 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12556 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12557 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12558 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12560 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12561 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12562 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12563 IEM_MC_END();
12564}
12565#else
12566FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12567#endif
12568
12569/** Opcode 0x66 0x0f 0xc7 !11/6. */
12570#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12571FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12572{
12573 IEMOP_MNEMONIC(vmclear, "vmclear");
12574 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12575 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12576 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12577 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12579 IEMOP_HLP_DONE_DECODING();
12580 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12581 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12582 IEM_MC_END();
12583}
12584#else
12585FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12586#endif
12587
12588/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12589#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12590FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12591{
12592 IEMOP_MNEMONIC(vmxon, "vmxon");
12593 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12594 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12595 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12597 IEMOP_HLP_DONE_DECODING();
12598 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12599 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12600 IEM_MC_END();
12601}
12602#else
12603FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12604#endif
12605
12606/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12607#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12608FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12609{
12610 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12611 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12612 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12613 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12614 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12616 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12617 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12618 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12619 IEM_MC_END();
12620}
12621#else
12622FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12623#endif
12624
12625/** Opcode 0x0f 0xc7 11/7. */
12626FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12627{
12628 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12629 IEMOP_RAISE_INVALID_OPCODE_RET();
12630
12631 if (IEM_IS_MODRM_REG_MODE(bRm))
12632 {
12633 /* register destination. */
12634 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12636 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12637 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12638 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12639 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12640 iemCImpl_rdseed, iReg, enmEffOpSize);
12641 IEM_MC_END();
12642 }
12643 /* Register only. */
12644 else
12645 IEMOP_RAISE_INVALID_OPCODE_RET();
12646}
12647
12648/**
12649 * Group 9 jump table for register variant.
12650 */
12651IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12652{ /* pfx: none, 066h, 0f3h, 0f2h */
12653 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12654 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12655 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12656 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12657 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12658 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12659 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12660 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12661};
12662AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12663
12664
12665/**
12666 * Group 9 jump table for memory variant.
12667 */
12668IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12669{ /* pfx: none, 066h, 0f3h, 0f2h */
12670 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12671 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12672 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12673 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12674 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12675 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12676 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12677 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12678};
12679AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12680
12681
12682/** Opcode 0x0f 0xc7. */
12683FNIEMOP_DEF(iemOp_Grp9)
12684{
12685 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
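    /* The tables are indexed by /reg (ModR/M bits 5:3) times the four prefix
       columns (none, 066h, 0f3h, 0f2h) given by idxPrefix. */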
12686 if (IEM_IS_MODRM_REG_MODE(bRm))
12687 /* register, register */
12688 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12689 + pVCpu->iem.s.idxPrefix], bRm);
12690 /* memory, register */
12691 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12692 + pVCpu->iem.s.idxPrefix], bRm);
12693}
12694
12695
12696/**
12697 * Common 'bswap register' helper.
12698 */
12699FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12700{
12701 switch (pVCpu->iem.s.enmEffOpSize)
12702 {
12703 case IEMMODE_16BIT:
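            /* Both Intel and AMD document bswap with a 16-bit operand as
               undefined; real hardware is commonly observed to zero the low
               word, which the u16 helper presumably mirrors. */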
12704 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12706 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12707 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12708 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12709 IEM_MC_ADVANCE_RIP_AND_FINISH();
12710 IEM_MC_END();
12711 break;
12712
12713 case IEMMODE_32BIT:
12714 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12716 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12717 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12718 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12719 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12720 IEM_MC_ADVANCE_RIP_AND_FINISH();
12721 IEM_MC_END();
12722 break;
12723
12724 case IEMMODE_64BIT:
12725 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12727 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12728 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12729 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12730 IEM_MC_ADVANCE_RIP_AND_FINISH();
12731 IEM_MC_END();
12732 break;
12733
12734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12735 }
12736}
12737
12738
12739/** Opcode 0x0f 0xc8. */
12740FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12741{
12742 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12743    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
12744       REX.X prefix, but REX.B appears to be the correct prefix. For a
12745       parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
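    /* Example encodings: 0f c8 = bswap eax; 41 0f c8 = bswap r8d (REX.B);
       48 0f c8 = bswap rax; 49 0f c8 = bswap r8 (REX.W+REX.B). */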
12746 IEMOP_HLP_MIN_486();
12747 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12748}
12749
12750
12751/** Opcode 0x0f 0xc9. */
12752FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12753{
12754 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12755 IEMOP_HLP_MIN_486();
12756 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12757}
12758
12759
12760/** Opcode 0x0f 0xca. */
12761FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12762{
12763    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12764 IEMOP_HLP_MIN_486();
12765 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12766}
12767
12768
12769/** Opcode 0x0f 0xcb. */
12770FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12771{
12772    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12773 IEMOP_HLP_MIN_486();
12774 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12775}
12776
12777
12778/** Opcode 0x0f 0xcc. */
12779FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12780{
12781 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12782 IEMOP_HLP_MIN_486();
12783 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12784}
12785
12786
12787/** Opcode 0x0f 0xcd. */
12788FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12789{
12790 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12791 IEMOP_HLP_MIN_486();
12792 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12793}
12794
12795
12796/** Opcode 0x0f 0xce. */
12797FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12798{
12799 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12800 IEMOP_HLP_MIN_486();
12801 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12802}
12803
12804
12805/** Opcode 0x0f 0xcf. */
12806FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12807{
12808 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12809 IEMOP_HLP_MIN_486();
12810 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12811}
12812
12813
12814/* Opcode 0x0f 0xd0 - invalid */
12815
12816
12817/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12818FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12819{
12820 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12821 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12822}
12823
12824
12825/* Opcode 0xf3 0x0f 0xd0 - invalid */
12826
12827
12828/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12829FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12830{
12831 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12832 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12833}
12834
12835
12836
12837/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12838FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12839{
12840 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12841 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12842}
12843
12844/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12845FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12846{
12847 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12848 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12849}
12850
12851/* Opcode 0xf3 0x0f 0xd1 - invalid */
12852/* Opcode 0xf2 0x0f 0xd1 - invalid */
12853
12854/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12855FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12856{
12857 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12858 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12859}
12860
12861
12862/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12863FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12864{
12865 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12866 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12867}
12868
12869
12870/* Opcode 0xf3 0x0f 0xd2 - invalid */
12871/* Opcode 0xf2 0x0f 0xd2 - invalid */
12872
12873/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12874FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12875{
12876    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12877 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12878}
12879
12880
12881/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12882FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12883{
12884 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12885 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12886}
12887
12888
12889/* Opcode 0xf3 0x0f 0xd3 - invalid */
12890/* Opcode 0xf2 0x0f 0xd3 - invalid */
12891
12892
12893/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12894FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12895{
12896 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12897 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12898}
12899
12900
12901/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12902FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12903{
12904 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12905 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12906}
12907
12908
12909/* Opcode 0xf3 0x0f 0xd4 - invalid */
12910/* Opcode 0xf2 0x0f 0xd4 - invalid */
12911
12912/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12913FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12914{
12915 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12916 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12917}
12918
12919/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12920FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12921{
12922 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12923 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12924}
12925
12926
12927/* Opcode 0xf3 0x0f 0xd5 - invalid */
12928/* Opcode 0xf2 0x0f 0xd5 - invalid */
12929
12930/* Opcode 0x0f 0xd6 - invalid */
12931
12932/**
12933 * @opcode 0xd6
12934 * @oppfx 0x66
12935 * @opcpuid sse2
12936 * @opgroup og_sse2_pcksclr_datamove
12937 * @opxcpttype none
12938 * @optest op1=-1 op2=2 -> op1=2
12939 * @optest op1=0 op2=-42 -> op1=-42
12940 */
12941FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12942{
12943 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12945 if (IEM_IS_MODRM_REG_MODE(bRm))
12946 {
12947 /*
12948 * Register, register.
12949 */
12950 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12952 IEM_MC_LOCAL(uint64_t, uSrc);
12953
12954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12955 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12956
12957 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12958 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12959
12960 IEM_MC_ADVANCE_RIP_AND_FINISH();
12961 IEM_MC_END();
12962 }
12963 else
12964 {
12965 /*
12966 * Memory, register.
12967 */
12968 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12969 IEM_MC_LOCAL(uint64_t, uSrc);
12970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12971
12972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12974 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12975 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12976
12977 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12978 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12979
12980 IEM_MC_ADVANCE_RIP_AND_FINISH();
12981 IEM_MC_END();
12982 }
12983}
12984
12985
12986/**
12987 * @opcode 0xd6
12988 * @opcodesub 11 mr/reg
12989 * @oppfx f3
12990 * @opcpuid sse2
12991 * @opgroup og_sse2_simdint_datamove
12992 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12993 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12994 */
12995FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12996{
12997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12998 if (IEM_IS_MODRM_REG_MODE(bRm))
12999 {
13000 /*
13001 * Register, register.
13002 */
13003 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
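        /* movq2dq copies the MMX register into the low qword of the XMM
           register and zero-extends the upper qword; like any MMX access it
           also switches the x87 unit to MMX mode (FTW goes to 0xff, as the
           optests above document). */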
13004 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13006 IEM_MC_LOCAL(uint64_t, uSrc);
13007
13008 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13009 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13010 IEM_MC_FPU_TO_MMX_MODE();
13011
13012 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13013 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13014
13015 IEM_MC_ADVANCE_RIP_AND_FINISH();
13016 IEM_MC_END();
13017 }
13018
13019 /**
13020 * @opdone
13021 * @opmnemonic udf30fd6mem
13022 * @opcode 0xd6
13023 * @opcodesub !11 mr/reg
13024 * @oppfx f3
13025 * @opunused intel-modrm
13026 * @opcpuid sse
13027 * @optest ->
13028 */
13029 else
13030 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13031}
13032
13033
13034/**
13035 * @opcode 0xd6
13036 * @opcodesub 11 mr/reg
13037 * @oppfx f2
13038 * @opcpuid sse2
13039 * @opgroup og_sse2_simdint_datamove
13040 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13041 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13042 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13043 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13044 * @optest op1=-42 op2=0xfedcba9876543210
13045 * -> op1=0xfedcba9876543210 ftw=0xff
13046 */
13047FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13048{
13049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13050 if (IEM_IS_MODRM_REG_MODE(bRm))
13051 {
13052 /*
13053 * Register, register.
13054 */
13055 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13056 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13058 IEM_MC_LOCAL(uint64_t, uSrc);
13059
13060 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13061 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13062 IEM_MC_FPU_TO_MMX_MODE();
13063
13064 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13065 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13066
13067 IEM_MC_ADVANCE_RIP_AND_FINISH();
13068 IEM_MC_END();
13069 }
13070
13071 /**
13072 * @opdone
13073 * @opmnemonic udf20fd6mem
13074 * @opcode 0xd6
13075 * @opcodesub !11 mr/reg
13076 * @oppfx f2
13077 * @opunused intel-modrm
13078 * @opcpuid sse
13079 * @optest ->
13080 */
13081 else
13082 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13083}
13084
13085
13086/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13087FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13088{
13089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13090    /* Docs say register only. */
13091    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13092    {
13093        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13094 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
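        /* pmovmskb gathers the most significant bit of each packed byte into
           the low bits of the destination GPR (8 bits here, 16 for the SSE
           form below); the architecture zeroes the remaining destination bits. */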
13095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13097 IEM_MC_ARG(uint64_t *, puDst, 0);
13098 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13099 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13100 IEM_MC_PREPARE_FPU_USAGE();
13101 IEM_MC_FPU_TO_MMX_MODE();
13102
13103 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13104 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13105 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13106
13107 IEM_MC_ADVANCE_RIP_AND_FINISH();
13108 IEM_MC_END();
13109 }
13110 else
13111 IEMOP_RAISE_INVALID_OPCODE_RET();
13112}
13113
13114
13115/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13116FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13117{
13118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13119    /* Docs say register only. */
13120    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13121    {
13122        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13123 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13124 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13127 IEM_MC_PREPARE_SSE_USAGE();
13128 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13129 IEM_MC_LIVENESS_GREG_CLOBBER(IEM_GET_MODRM_REG(pVCpu, bRm));
13130 IEM_MC_LIVENESS_XREG_INPUT(IEM_GET_MODRM_RM(pVCpu, bRm));
13131 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13132 } IEM_MC_NATIVE_ELSE() {
13133 IEM_MC_ARG(uint64_t *, puDst, 0);
13134 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13135 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13136 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13137 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13138 } IEM_MC_NATIVE_ENDIF();
13139 IEM_MC_ADVANCE_RIP_AND_FINISH();
13140 IEM_MC_END();
13141 }
13142 else
13143 IEMOP_RAISE_INVALID_OPCODE_RET();
13144}
13145
13146
13147/* Opcode 0xf3 0x0f 0xd7 - invalid */
13148/* Opcode 0xf2 0x0f 0xd7 - invalid */
13149
13150
13151/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13152FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13153{
13154 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13155 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13156}
13157
13158
13159/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13160FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13161{
13162 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13163 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13164}
13165
13166
13167/* Opcode 0xf3 0x0f 0xd8 - invalid */
13168/* Opcode 0xf2 0x0f 0xd8 - invalid */
13169
13170/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13171FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13172{
13173 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13174 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13175}
13176
13177
13178/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13179FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13180{
13181 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13182 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13183}
13184
13185
13186/* Opcode 0xf3 0x0f 0xd9 - invalid */
13187/* Opcode 0xf2 0x0f 0xd9 - invalid */
13188
13189/** Opcode 0x0f 0xda - pminub Pq, Qq */
13190FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13191{
13192 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13193 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13194}
13195
13196
13197/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13198FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13199{
13200 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13201 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13202}
13203
13204/* Opcode 0xf3 0x0f 0xda - invalid */
13205/* Opcode 0xf2 0x0f 0xda - invalid */
13206
13207/** Opcode 0x0f 0xdb - pand Pq, Qq */
13208FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13209{
13210 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13211 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13212}
13213
13214
13215/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13216FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13217{
13218 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13219 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13220}
13221
13222
13223/* Opcode 0xf3 0x0f 0xdb - invalid */
13224/* Opcode 0xf2 0x0f 0xdb - invalid */
13225
13226/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13227FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13228{
13229 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13230 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13231}
13232
13233
13234/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13235FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13236{
13237 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13238 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13239}
13240
13241
13242/* Opcode 0xf3 0x0f 0xdc - invalid */
13243/* Opcode 0xf2 0x0f 0xdc - invalid */
13244
13245/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13246FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13247{
13248 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13249 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13250}
13251
13252
13253/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13254FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13255{
13256 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13257 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13258}
13259
13260
13261/* Opcode 0xf3 0x0f 0xdd - invalid */
13262/* Opcode 0xf2 0x0f 0xdd - invalid */
13263
13264/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13265FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13266{
13267 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13268 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13269}
13270
13271
13272/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13273FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13274{
13275 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13276 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13277}
13278
13279/* Opcode 0xf3 0x0f 0xde - invalid */
13280/* Opcode 0xf2 0x0f 0xde - invalid */
13281
13282
13283/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13284FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13285{
13286 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13287 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13288}
13289
13290
13291/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13292FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13293{
13294 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13295 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13296}
13297
13298
13299/* Opcode 0xf3 0x0f 0xdf - invalid */
13300/* Opcode 0xf2 0x0f 0xdf - invalid */
13301
13302/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13303FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13304{
13305 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13306 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13307}
13308
13309
13310/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13311FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13312{
13313 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13314 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13315}
13316
13317
13318/* Opcode 0xf3 0x0f 0xe0 - invalid */
13319/* Opcode 0xf2 0x0f 0xe0 - invalid */
13320
13321/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13322FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13323{
13324 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13325 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13326}
13327
13328
13329/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13330FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13331{
13332 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13333 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13334}
13335
13336
13337/* Opcode 0xf3 0x0f 0xe1 - invalid */
13338/* Opcode 0xf2 0x0f 0xe1 - invalid */
13339
13340/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13341FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13342{
13343 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13344 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13345}
13346
13347
13348/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13349FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13350{
13351 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13352 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13353}
13354
13355
13356/* Opcode 0xf3 0x0f 0xe2 - invalid */
13357/* Opcode 0xf2 0x0f 0xe2 - invalid */
13358
13359/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13360FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13361{
13362 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13363 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13364}
13365
13366
13367/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13368FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13369{
13370 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13371 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13372}
13373
13374
13375/* Opcode 0xf3 0x0f 0xe3 - invalid */
13376/* Opcode 0xf2 0x0f 0xe3 - invalid */
13377
13378/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13379FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13380{
13381 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13382 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13383}
13384
13385
13386/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13387FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13388{
13389 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13390 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13391}
13392
13393
13394/* Opcode 0xf3 0x0f 0xe4 - invalid */
13395/* Opcode 0xf2 0x0f 0xe4 - invalid */
13396
13397/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13398FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13399{
13400 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13401 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13402}
13403
13404
13405/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13406FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13407{
13408 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13409 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13410}
13411
13412
13413/* Opcode 0xf3 0x0f 0xe5 - invalid */
13414/* Opcode 0xf2 0x0f 0xe5 - invalid */
13415/* Opcode 0x0f 0xe6 - invalid */
13416
13417
13418/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13419FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13420{
13421 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13422 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13423}
13424
13425
13426/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13427FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13428{
13429 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13430 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13431}
13432
13433
13434/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13435FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13436{
13437 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13438 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13439}
13440
13441
13442/**
13443 * @opcode 0xe7
13444 * @opcodesub !11 mr/reg
13445 * @oppfx none
13446 * @opcpuid sse
13447 * @opgroup og_sse1_cachect
13448 * @opxcpttype none
13449 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13450 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13451 */
13452FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13453{
13454 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13456 if (IEM_IS_MODRM_MEM_MODE(bRm))
13457 {
13458 /* Register, memory. */
13459 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13460 IEM_MC_LOCAL(uint64_t, uSrc);
13461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13462
13463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13467 IEM_MC_FPU_TO_MMX_MODE();
13468
13469 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13470 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13471
13472 IEM_MC_ADVANCE_RIP_AND_FINISH();
13473 IEM_MC_END();
13474 }
13475 /**
13476 * @opdone
13477 * @opmnemonic ud0fe7reg
13478 * @opcode 0xe7
13479 * @opcodesub 11 mr/reg
13480 * @oppfx none
13481 * @opunused immediate
13482 * @opcpuid sse
13483 * @optest ->
13484 */
13485 else
13486 IEMOP_RAISE_INVALID_OPCODE_RET();
13487}
13488
13489/**
13490 * @opcode 0xe7
13491 * @opcodesub !11 mr/reg
13492 * @oppfx 0x66
13493 * @opcpuid sse2
13494 * @opgroup og_sse2_cachect
13495 * @opxcpttype 1
13496 * @optest op1=-1 op2=2 -> op1=2
13497 * @optest op1=0 op2=-42 -> op1=-42
13498 */
13499FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13500{
13501 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13503 if (IEM_IS_MODRM_MEM_MODE(bRm))
13504 {
13505 /* Register, memory. */
13506 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13507 IEM_MC_LOCAL(RTUINT128U, uSrc);
13508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13509
13510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13512 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13513 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13514
13515 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13516 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13517
13518 IEM_MC_ADVANCE_RIP_AND_FINISH();
13519 IEM_MC_END();
13520 }
13521
13522 /**
13523 * @opdone
13524 * @opmnemonic ud660fe7reg
13525 * @opcode 0xe7
13526 * @opcodesub 11 mr/reg
13527 * @oppfx 0x66
13528 * @opunused immediate
13529 * @opcpuid sse
13530 * @optest ->
13531 */
13532 else
13533 IEMOP_RAISE_INVALID_OPCODE_RET();
13534}
13535
13536/* Opcode 0xf3 0x0f 0xe7 - invalid */
13537/* Opcode 0xf2 0x0f 0xe7 - invalid */
13538
13539
13540/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13541FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13542{
13543 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13544 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13545}
13546
13547
13548/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13549FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13550{
13551 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13552 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13553}
13554
13555
13556/* Opcode 0xf3 0x0f 0xe8 - invalid */
13557/* Opcode 0xf2 0x0f 0xe8 - invalid */
13558
13559/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13560FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13561{
13562 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13563 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
13564}
13565
13566
13567/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13568FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13569{
13570 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13571 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
13572}
13573
13574
13575/* Opcode 0xf3 0x0f 0xe9 - invalid */
13576/* Opcode 0xf2 0x0f 0xe9 - invalid */
13577
13578
13579/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13580FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13581{
13582 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13583 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
13584}
13585
13586
13587/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13588FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13589{
13590 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13591 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
13592}
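
/*
 * PMINSW (and PMAXSW below) pick the signed minimum/maximum of each 16-bit
 * lane; the MMX forms route through the MmxSse worker because they arrived
 * with SSE / AMD's MMX extensions rather than baseline MMX. Per-lane
 * reference sketch (illustrative only, not the actual iemAImpl_pminsw_* code):
 *
 *      static int16_t pminsw_lane_ref(int16_t i16Dst, int16_t i16Src)
 *      {
 *          return i16Src < i16Dst ? i16Src : i16Dst;
 *      }
 */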
13593
13594
13595/* Opcode 0xf3 0x0f 0xea - invalid */
13596/* Opcode 0xf2 0x0f 0xea - invalid */
13597
13598
13599/** Opcode 0x0f 0xeb - por Pq, Qq */
13600FNIEMOP_DEF(iemOp_por_Pq_Qq)
13601{
13602 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13603 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
13604}
13605
13606
13607/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13608FNIEMOP_DEF(iemOp_por_Vx_Wx)
13609{
13610 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13611 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13612}
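
/*
 * POR is a plain 128-bit bitwise OR, so it goes through the
 * SSE2_OPT_BODY_FullFull_To_Full macro instead of the common worker; the two
 * RT_ARCH_VAL_* masks appear to advertise which host architectures have
 * dedicated emitters for it (an assumption from the parameter pattern, not
 * verified against the macro definition).
 */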
13613
13614
13615/* Opcode 0xf3 0x0f 0xeb - invalid */
13616/* Opcode 0xf2 0x0f 0xeb - invalid */
13617
13618/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13619FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13620{
13621 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13622 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
13623}
13624
13625
13626/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13627FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13628{
13629 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13630 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
13631}
13632
13633
13634/* Opcode 0xf3 0x0f 0xec - invalid */
13635/* Opcode 0xf2 0x0f 0xec - invalid */
13636
13637/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13638FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13639{
13640 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13641 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
13642}
13643
13644
13645/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13646FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13647{
13648 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13649 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
13650}
13651
13652
13653/* Opcode 0xf3 0x0f 0xed - invalid */
13654/* Opcode 0xf2 0x0f 0xed - invalid */
13655
13656
13657/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13658FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13659{
13660 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13661 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13662}
13663
13664
13665/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13666FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13667{
13668 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13669 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13670}
13671
13672
13673/* Opcode 0xf3 0x0f 0xee - invalid */
13674/* Opcode 0xf2 0x0f 0xee - invalid */
13675
13676
13677/** Opcode 0x0f 0xef - pxor Pq, Qq */
13678FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13679{
13680 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13681 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
13682}
13683
13684
13685/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13686FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13687{
13688 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13689 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13690}
13691
13692
13693/* Opcode 0xf3 0x0f 0xef - invalid */
13694/* Opcode 0xf2 0x0f 0xef - invalid */
13695
13696/* Opcode 0x0f 0xf0 - invalid */
13697/* Opcode 0x66 0x0f 0xf0 - invalid */
13698
13699
13700/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13701FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13702{
13703 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13705 if (IEM_IS_MODRM_REG_MODE(bRm))
13706 {
13707 /*
13708 * Register, register - (not implemented, assuming it raises \#UD).
13709 */
13710 IEMOP_RAISE_INVALID_OPCODE_RET();
13711 }
13712 else
13713 {
13714 /*
13715 * Register, memory.
13716 */
13717 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13718 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13720
13721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13723 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13724 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13725 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13726 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13727
13728 IEM_MC_ADVANCE_RIP_AND_FINISH();
13729 IEM_MC_END();
13730 }
13731}
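
/*
 * LDDQU loads 16 bytes with no alignment restriction (hence the _NO_AC fetch
 * above); architecturally the result is identical to MOVDQU, the SSE3 twist
 * being merely a cache-friendlier wide read on real hardware. Reference
 * sketch (illustrative only, not the decoder body above):
 *
 *      #include <string.h>
 *      static void lddqu_ref(uint8_t abDst[16], uint8_t const *pbSrc)
 *      {
 *          memcpy(abDst, pbSrc, 16); // no 16-byte alignment fault, unlike MOVDQA
 *      }
 */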
13732
13733
13734/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13735FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13736{
13737 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13738 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13739}
13740
13741
13742/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13743FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13744{
13745 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13746 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13747}
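
/*
 * For the PSLLx Pq,Qq / Vx,Wx forms the shift count is the (low 64 bits of
 * the) source operand taken as a whole, not per lane, and any count larger
 * than the lane width minus one clears the destination. Per-lane reference
 * sketch for PSLLW (illustrative only, not the actual iemAImpl_psllw_* code):
 *
 *      static uint16_t psllw_lane_ref(uint16_t u16Lane, uint64_t u64Count)
 *      {
 *          return u64Count > 15 ? (uint16_t)0 : (uint16_t)(u16Lane << u64Count);
 *      }
 */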
13748
13749
13750/* Opcode 0xf2 0x0f 0xf1 - invalid */
13751
13752/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13753FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13754{
13755 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13756 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13757}
13758
13759
13760/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13761FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13762{
13763 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13764 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13765}
13766
13767
13768/* Opcode 0xf2 0x0f 0xf2 - invalid */
13769
13770/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13771FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13772{
13773 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13774 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13775}
13776
13777
13778/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13779FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13780{
13781 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13782 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13783}
13784
13785/* Opcode 0xf2 0x0f 0xf3 - invalid */
13786
13787/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13788FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13789{
13790 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13791 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13792}
13793
13794
13795 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13796FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13797{
13798 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13799 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13800}
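
/*
 * PMULUDQ multiplies the even-numbered unsigned 32-bit lanes into full 64-bit
 * products: one product for the MMX form, two (lanes 0 and 2) for the 128-bit
 * form. Per-product reference sketch (illustrative only, not the actual
 * iemAImpl_pmuludq_* code):
 *
 *      static uint64_t pmuludq_ref(uint32_t u32Dst, uint32_t u32Src)
 *      {
 *          return (uint64_t)u32Dst * u32Src; // widening multiply, nothing truncated
 *      }
 */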
13801
13802
13803/* Opcode 0xf2 0x0f 0xf4 - invalid */
13804
13805/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13806FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13807{
13808 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13809 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13810}
13811
13812
13813/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13814FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13815{
13816 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13817 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13818}
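
/*
 * PMADDWD multiplies the signed 16-bit lanes pairwise and adds each pair of
 * 32-bit products into one 32-bit lane; only the -0x8000 * -0x8000 twice
 * corner case overflows, wrapping to 0x80000000. Per-pair reference sketch
 * (illustrative only, not the actual iemAImpl_pmaddwd_* code):
 *
 *      static int32_t pmaddwd_pair_ref(int16_t i16Dst0, int16_t i16Src0,
 *                                      int16_t i16Dst1, int16_t i16Src1)
 *      {
 *          int64_t const iRes = (int64_t)i16Dst0 * i16Src0
 *                             + (int64_t)i16Dst1 * i16Src1;
 *          return (int32_t)iRes; // wraps only in the all -0x8000 case
 *      }
 */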
13819
13820/* Opcode 0xf2 0x0f 0xf5 - invalid */
13821
13822/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13823FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13824{
13825 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13826 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13827}
13828
13829
13830/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13831FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13832{
13833 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13834 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13835}
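
/*
 * PSADBW sums the absolute differences of eight byte lanes into a 16-bit
 * value zero-extended into the destination qword (independently per 64-bit
 * half for the 128-bit form). Reference sketch (illustrative only, not the
 * actual iemAImpl_psadbw_* code):
 *
 *      static uint64_t psadbw_ref(uint8_t const abDst[8], uint8_t const abSrc[8])
 *      {
 *          uint16_t u16Sum = 0; // max 8 * 255 = 2040, fits in 16 bits
 *          for (unsigned i = 0; i < 8; i++)
 *              u16Sum += abDst[i] >= abSrc[i] ? abDst[i] - abSrc[i]
 *                                             : abSrc[i] - abDst[i];
 *          return u16Sum; // bits 16 thru 63 of the result are zero
 *      }
 */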
13836
13837
13838/* Opcode 0xf2 0x0f 0xf6 - invalid */
13839
13840/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13841FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
13842{
13843// IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13845 if (IEM_IS_MODRM_REG_MODE(bRm))
13846 {
13847 /*
13848 * MMX, MMX, (implicit) [ ER]DI
13849 */
13850 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13852 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13853 IEM_MC_LOCAL( uint64_t, u64Mem);
13854 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
13855 IEM_MC_ARG( uint64_t const *, puSrc, 1);
13856 IEM_MC_ARG( uint64_t const *, puMsk, 2);
13857 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13858 IEM_MC_PREPARE_FPU_USAGE();
13859 IEM_MC_FPU_TO_MMX_MODE();
13860
13861 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13862 IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13863 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
13864 IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
13865 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
13866 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);
13867
13868 IEM_MC_ADVANCE_RIP_AND_FINISH();
13869 IEM_MC_END();
13870 }
13871 else
13872 {
13873 /* The memory, register encoding is invalid. */
13874 IEMOP_RAISE_INVALID_OPCODE_RET();
13875 }
13876}
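
/*
 * MASKMOVQ stores only those source bytes whose mask byte has bit 7 set, to
 * [seg:(E/R)DI]; the block above realizes that as a read-modify-write of the
 * whole qword. Reference sketch of the architectural effect (illustrative
 * only, not the actual iemAImpl_maskmovq_u64 code):
 *
 *      static void maskmovq_ref(uint8_t *pbDst, uint8_t const abSrc[8],
 *                               uint8_t const abMsk[8])
 *      {
 *          for (unsigned i = 0; i < 8; i++)
 *              if (abMsk[i] & 0x80)
 *                  pbDst[i] = abSrc[i];
 *      }
 *
 * MASKMOVDQU below is the 16-byte SSE2 counterpart.
 */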
13877
13878
13879/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13880FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
13881{
13882// IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13884 if (IEM_IS_MODRM_REG_MODE(bRm))
13885 {
13886 /*
13887 * XMM, XMM, (implicit) [ ER]DI
13888 */
13889 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13891 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13892 IEM_MC_LOCAL( RTUINT128U, u128Mem);
13893 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
13894 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
13895 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
13896 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13897 IEM_MC_PREPARE_SSE_USAGE();
13898
13899 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13900 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13901 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13902 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
13903 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
13904 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
13905
13906 IEM_MC_ADVANCE_RIP_AND_FINISH();
13907 IEM_MC_END();
13908 }
13909 else
13910 {
13911 /* The memory, register encoding is invalid. */
13912 IEMOP_RAISE_INVALID_OPCODE_RET();
13913 }
13914}
13915
13916
13917/* Opcode 0xf2 0x0f 0xf7 - invalid */
13918
13919
13920/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13921FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13922{
13923 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13924 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
13925}
13926
13927
13928/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13929FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13930{
13931 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13932 SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13933}
13934
13935
13936/* Opcode 0xf2 0x0f 0xf8 - invalid */
13937
13938
13939/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13940FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13941{
13942 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13943 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
13944}
13945
13946
13947/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13948FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13949{
13950 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13951 SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13952}
13953
13954
13955/* Opcode 0xf2 0x0f 0xf9 - invalid */
13956
13957
13958/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13959FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13960{
13961 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13962 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
13963}
13964
13965
13966/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13967FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13968{
13969 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13970 SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13971}
13972
13973
13974/* Opcode 0xf2 0x0f 0xfa - invalid */
13975
13976
13977/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13978FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13979{
13980 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13981 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13982}
13983
13984
13985/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13986FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13987{
13988 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13989 SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13990}
13991
13992
13993/* Opcode 0xf2 0x0f 0xfb - invalid */
13994
13995
13996/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13997FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13998{
13999 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14000 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
14001}
14002
14003
14004/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14005FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14006{
14007 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14008 SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14009}
14010
14011
14012/* Opcode 0xf2 0x0f 0xfc - invalid */
14013
14014
14015/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14016FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14017{
14018 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14019 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
14020}
14021
14022
14023/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14024FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14025{
14026 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14027 SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14028}
14029
14030
14031/* Opcode 0xf2 0x0f 0xfd - invalid */
14032
14033
14034/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14035FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14036{
14037 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14038 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
14039}
14040
14041
14042 /** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14043FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14044{
14045 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14046 SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14047}
14048
14049
14050/* Opcode 0xf2 0x0f 0xfe - invalid */
14051
14052
14053 /** Opcode 0x0f 0xff - UD0 */
14054FNIEMOP_DEF(iemOp_ud0)
14055{
14056 IEMOP_MNEMONIC(ud0, "ud0");
14057 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14058 {
14059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14060 if (IEM_IS_MODRM_MEM_MODE(bRm))
14061 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14062 }
14063 IEMOP_HLP_DONE_DECODING();
14064 IEMOP_RAISE_INVALID_OPCODE_RET();
14065}
14066
14067
14068
14069/**
14070 * Two byte opcode map, first byte 0x0f.
14071 *
14072 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14073 * check if it needs updating as well when making changes.
14074 */
14075const PFNIEMOP g_apfnTwoByteMap[] =
14076{
14077 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14078 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14079 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14080 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14081 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14082 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14083 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14084 /* 0x06 */ IEMOP_X4(iemOp_clts),
14085 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14086 /* 0x08 */ IEMOP_X4(iemOp_invd),
14087 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14088 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14089 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14090 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14091 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14092 /* 0x0e */ IEMOP_X4(iemOp_femms),
14093 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14094
14095 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14096 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14097 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14098 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14099 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14102 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14104 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14105 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14106 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14107 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14108 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14109 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14110 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14111
14112 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14113 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14114 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14115 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14116 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14117 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14118 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14119 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14120 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14123 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14125 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14126 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14127 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128
14129 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14130 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14131 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14132 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14133 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14134 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14135 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14136 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14137 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14138 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14139 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14140 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14141 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14142 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14143 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14144 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14145
14146 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14147 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14148 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14149 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14150 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14151 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14152 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14153 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14154 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14155 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14156 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14157 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14158 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14159 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14160 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14161 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14162
14163 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14164 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14165 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14166 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14167 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14168 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14172 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14173 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14174 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14175 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14176 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14177 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14178 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14179
14180 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14181 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14188 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14196
14197 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14198 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14199 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14200 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14201 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14202 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14203 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14204 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14205
14206 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14207 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14208 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14209 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14210 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14211 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14212 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14213 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14214
14215 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14216 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14217 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14218 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14219 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14220 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14221 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14222 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14223 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14224 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14225 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14226 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14227 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14228 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14229 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14230 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14231
14232 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14233 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14234 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14235 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14236 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14237 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14238 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14239 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14240 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14241 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14242 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14243 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14244 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14245 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14246 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14247 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14248
14249 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14250 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14251 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14252 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14253 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14254 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14255 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14256 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14257 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14258 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14259 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14260 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14261 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14262 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14263 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14264 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14265
14266 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14267 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14268 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14269 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14270 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14271 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14272 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14273 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14274 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14275 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14276 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14277 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14278 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14279 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14280 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14281 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14282
14283 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14284 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14285 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14286 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14287 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14288 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14289 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14290 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14291 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14292 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14293 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14294 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14295 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14296 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14297 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14298 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14299
14300 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14301 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14307 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316
14317 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14324 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333
14334 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14335 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14342 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14343 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14345 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14346 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14347 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14348 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14349 /* 0xff */ IEMOP_X4(iemOp_ud0),
14350};
14351AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14352
14353/** @} */
14354