source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 106695

Last change on this file was r106695, checked in by vboxsync, 4 weeks ago

ValidationKit/bootsectors: Implement SIMD FP testcases for cvtpi2ps, and fix it in IEM [build fix]; bugref:10658; jiraref:VBP-1206

  • turns out IEM cvtpi2ps [mem] needs to leave FTW entirely alone
  • I had misunderstood some test results...

/* $Id: IEMAllInstTwoByte0f.cpp.h 106695 2024-10-25 12:57:34Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
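
/*
 * Illustrative usage sketch (not part of the original file): an opcode handler
 * for an MMX instruction would typically decode the mnemonic and dispatch to
 * the worker above with the matching assembly-level helper. The handler and
 * helper names below are hypothetical stand-ins following the file's naming
 * conventions.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq_example)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}
#endif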


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
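
/*
 * Illustrative usage sketch (not part of the original file): an opcode handler
 * can expand the body macro above in-place, naming the instruction (used by
 * the RT_CONCAT3 native-emitter lookup), the C fallback worker, and the host
 * architectures that have native emitters. All names below are hypothetical
 * stand-ins, and the arch mask values are assumptions.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_pand_Vx_Wx_example)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
#endif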


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
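
/*
 * Illustrative usage sketch (not part of the original file): a packed
 * single-precision arithmetic opcode would dispatch to the worker above with
 * the matching SSE floating-point helper. Handler and helper names are
 * hypothetical stand-ins following the file's naming conventions.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addps_Vps_Wps_example)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}
#endif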


/**
 * A body preprocessor variant of iemOpCommonSseFp_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE_FP_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLFPSSEF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCX86XMMREG, pSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(X86XMMREG, uSrc2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LIVENESS_MXCSR_MODIFY(); \
            IEM_MC_NATIVE_EMIT_2_EX(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc2); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_LOCAL(X86XMMREG, SseRes); \
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0); \
            IEM_MC_ARG(PCX86XMMREG, pSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2); \
            IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); \
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
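
/*
 * Illustrative usage sketch (not part of the original file): like the
 * SSE2_OPT_BODY variant above, the FP body macro is expanded in-place by a
 * handler. Names and arch mask values below are hypothetical stand-ins.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps_example)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
#endif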


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
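
/*
 * Illustrative usage sketch (not part of the original file): scalar
 * single-precision instructions (xxss) take a 32-bit source operand, so they
 * dispatch to the R32 worker above. Names are hypothetical stand-ins.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addss_Vss_Wss_example)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}
#endif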


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
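
/*
 * Illustrative usage sketch (not part of the original file): the scalar
 * double-precision (xxsd) counterpart of the R32 case above. Names are
 * hypothetical stand-ins.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd_example)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
#endif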


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1637
1638/** Opcode 0x0f 0x01 /6. */
1639FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1640{
1641 /* The operand size is effectively ignored; everything is 16-bit and only
1642 the lower four bits of the source operand are used. */
1643 IEMOP_MNEMONIC(lmsw, "lmsw");
1644 IEMOP_HLP_MIN_286();
1645 if (IEM_IS_MODRM_REG_MODE(bRm))
1646 {
1647 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1649 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1650 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1651 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1652 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1653 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1654 IEM_MC_END();
1655 }
1656 else
1657 {
1658 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1659 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1660 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1663 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1664 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1665 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1666 IEM_MC_END();
1667 }
1668}
1669
1670
1671/** Opcode 0x0f 0x01 /7. */
1672FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1673{
1674 IEMOP_MNEMONIC(invlpg, "invlpg");
1675 IEMOP_HLP_MIN_486();
1676 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1677 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1680 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1681 IEM_MC_END();
1682}
1683
1684
1685/** Opcode 0x0f 0x01 0xf8. */
1686FNIEMOP_DEF(iemOp_Grp7_swapgs)
1687{
1688 IEMOP_MNEMONIC(swapgs, "swapgs");
1689 IEMOP_HLP_ONLY_64BIT();
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1691 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1692}
1693
1694
1695/** Opcode 0x0f 0x01 0xf9. */
1696FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1697{
1698 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1700 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1703 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1704 iemCImpl_rdtscp);
1705}
1706
1707
1708/**
1709 * Group 7 jump table, memory variant.
1710 */
1711IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1712{
1713 iemOp_Grp7_sgdt,
1714 iemOp_Grp7_sidt,
1715 iemOp_Grp7_lgdt,
1716 iemOp_Grp7_lidt,
1717 iemOp_Grp7_smsw,
1718 iemOp_InvalidWithRM,
1719 iemOp_Grp7_lmsw,
1720 iemOp_Grp7_invlpg
1721};
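/* Note: the register (mod=3) forms of group 7 are decoded by the switch in
   iemOp_Grp7 below, not via this table; e.g. 0f 01 d0 (mod=3, reg=2, rm=0)
   decodes to xgetbv. */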
1722
1723
1724/** Opcode 0x0f 0x01. */
1725FNIEMOP_DEF(iemOp_Grp7)
1726{
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if (IEM_IS_MODRM_MEM_MODE(bRm))
1729 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1730
1731 switch (IEM_GET_MODRM_REG_8(bRm))
1732 {
1733 case 0:
1734 switch (IEM_GET_MODRM_RM_8(bRm))
1735 {
1736 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1737 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1738 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1739 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1740 }
1741 IEMOP_RAISE_INVALID_OPCODE_RET();
1742
1743 case 1:
1744 switch (IEM_GET_MODRM_RM_8(bRm))
1745 {
1746 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1747 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1748 }
1749 IEMOP_RAISE_INVALID_OPCODE_RET();
1750
1751 case 2:
1752 switch (IEM_GET_MODRM_RM_8(bRm))
1753 {
1754 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1755 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1756 }
1757 IEMOP_RAISE_INVALID_OPCODE_RET();
1758
1759 case 3:
1760 switch (IEM_GET_MODRM_RM_8(bRm))
1761 {
1762 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1763 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1764 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1765 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1766 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1767 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1768 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1769 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1770 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1771 }
1772
1773 case 4:
1774 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1775
1776 case 5:
1777 IEMOP_RAISE_INVALID_OPCODE_RET();
1778
1779 case 6:
1780 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1781
1782 case 7:
1783 switch (IEM_GET_MODRM_RM_8(bRm))
1784 {
1785 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1786 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1787 }
1788 IEMOP_RAISE_INVALID_OPCODE_RET();
1789
1790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1791 }
1792}
1793
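/**
 * Common worker for LAR (fIsLar == true) and LSL (fIsLar == false), i.e. the
 * 0x0f 0x02 and 0x0f 0x03 opcodes taking Gv,Ew operands.
 */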
1794FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1795{
1796 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798
1799 if (IEM_IS_MODRM_REG_MODE(bRm))
1800 {
1801 switch (pVCpu->iem.s.enmEffOpSize)
1802 {
1803 case IEMMODE_16BIT:
1804 IEM_MC_BEGIN(0, 0);
1805 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1807 IEM_MC_ARG(uint16_t, u16Sel, 1);
1808 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1809
1810 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1811 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1812 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1813 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1814
1815 IEM_MC_END();
1816 break;
1817
1818 case IEMMODE_32BIT:
1819 case IEMMODE_64BIT:
1820 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1821 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1822 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1823 IEM_MC_ARG(uint16_t, u16Sel, 1);
1824 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1825
1826 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1827 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1828 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1829 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1830
1831 IEM_MC_END();
1832 break;
1833
1834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1835 }
1836 }
1837 else
1838 {
1839 switch (pVCpu->iem.s.enmEffOpSize)
1840 {
1841 case IEMMODE_16BIT:
1842 IEM_MC_BEGIN(0, 0);
1843 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1844 IEM_MC_ARG(uint16_t, u16Sel, 1);
1845 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1850
1851 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1852 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1853 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1854 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1855
1856 IEM_MC_END();
1857 break;
1858
1859 case IEMMODE_32BIT:
1860 case IEMMODE_64BIT:
1861 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1862 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1863 IEM_MC_ARG(uint16_t, u16Sel, 1);
1864 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866
1867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869/** @todo testcase: make sure it's a 16-bit read. */
1870
1871 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1872 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1873 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1874 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1875
1876 IEM_MC_END();
1877 break;
1878
1879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1880 }
1881 }
1882}
1883
1884
1885
1886/**
1887 * @opcode 0x02
1888 * @opflmodify zf
1889 */
1890FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1891{
1892 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1893 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1894}
1895
1896
1897/**
1898 * @opcode 0x03
1899 * @opflmodify zf
1900 */
1901FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1902{
1903 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1904 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1905}
1906
1907
1908/** Opcode 0x0f 0x05. */
1909FNIEMOP_DEF(iemOp_syscall)
1910{
1911 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1912 {
1913 IEMOP_MNEMONIC(syscall, "syscall");
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1916 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1917 }
1918 else
1919 {
1920 IEMOP_MNEMONIC(loadall286, "loadall286");
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1923 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1924 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1925 }
1926}
1927
1928
1929/** Opcode 0x0f 0x06. */
1930FNIEMOP_DEF(iemOp_clts)
1931{
1932 IEMOP_MNEMONIC(clts, "clts");
1933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1934 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1935}
1936
1937
1938/** Opcode 0x0f 0x07. */
1939FNIEMOP_DEF(iemOp_sysret)
1940{
1941 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1945 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1946}
1947
1948
1949/** Opcode 0x0f 0x08. */
1950FNIEMOP_DEF(iemOp_invd)
1951{
1952 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1953 IEMOP_HLP_MIN_486();
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1956}
1957
1958
1959/** Opcode 0x0f 0x09. */
1960FNIEMOP_DEF(iemOp_wbinvd)
1961{
1962 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1963 IEMOP_HLP_MIN_486();
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1966}
1967
1968
1969/** Opcode 0x0f 0x0b. */
1970FNIEMOP_DEF(iemOp_ud2)
1971{
1972 IEMOP_MNEMONIC(ud2, "ud2");
1973 IEMOP_RAISE_INVALID_OPCODE_RET();
1974}
1975
1976/** Opcode 0x0f 0x0d. */
1977FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1978{
1979 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1981 {
1982 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1983 IEMOP_RAISE_INVALID_OPCODE_RET();
1984 }
1985
1986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1987 if (IEM_IS_MODRM_REG_MODE(bRm))
1988 {
1989 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1990 IEMOP_RAISE_INVALID_OPCODE_RET();
1991 }
1992
1993 switch (IEM_GET_MODRM_REG_8(bRm))
1994 {
1995 case 2: /* Aliased to /0 for the time being. */
1996 case 4: /* Aliased to /0 for the time being. */
1997 case 5: /* Aliased to /0 for the time being. */
1998 case 6: /* Aliased to /0 for the time being. */
1999 case 7: /* Aliased to /0 for the time being. */
2000 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2001 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2002 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2004 }
2005
2006 IEM_MC_BEGIN(0, 0);
2007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 /* Currently a NOP. */
2011 IEM_MC_NOREF(GCPtrEffSrc);
2012 IEM_MC_ADVANCE_RIP_AND_FINISH();
2013 IEM_MC_END();
2014}
2015
2016
2017/** Opcode 0x0f 0x0e. */
2018FNIEMOP_DEF(iemOp_femms)
2019{
2020 IEMOP_MNEMONIC(femms, "femms");
2021
2022 IEM_MC_BEGIN(0, 0);
2023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2027 IEM_MC_FPU_FROM_MMX_MODE();
2028 IEM_MC_ADVANCE_RIP_AND_FINISH();
2029 IEM_MC_END();
2030}
2031
2032
2033/** Opcode 0x0f 0x0f. */
2034FNIEMOP_DEF(iemOp_3Dnow)
2035{
2036 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2037 {
2038 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2039 IEMOP_RAISE_INVALID_OPCODE_RET();
2040 }
2041
2042#ifdef IEM_WITH_3DNOW
2043 /* This is pretty sparse, use switch instead of table. */
2044 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2045 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2046#else
2047 IEMOP_BITCH_ABOUT_STUB();
2048 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2049#endif
2050}
2051
2052
2053/**
2054 * @opcode 0x10
2055 * @oppfx none
2056 * @opcpuid sse
2057 * @opgroup og_sse_simdfp_datamove
2058 * @opxcpttype 4UA
2059 * @optest op1=1 op2=2 -> op1=2
2060 * @optest op1=0 op2=-22 -> op1=-22
2061 */
2062FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2063{
2064 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2066 if (IEM_IS_MODRM_REG_MODE(bRm))
2067 {
2068 /*
2069 * XMM128, XMM128.
2070 */
2071 IEM_MC_BEGIN(0, 0);
2072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2073 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2075 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2076 IEM_GET_MODRM_RM(pVCpu, bRm));
2077 IEM_MC_ADVANCE_RIP_AND_FINISH();
2078 IEM_MC_END();
2079 }
2080 else
2081 {
2082 /*
2083 * XMM128, [mem128].
2084 */
2085 IEM_MC_BEGIN(0, 0);
2086 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2088
2089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2093
2094 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2095 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2096
2097 IEM_MC_ADVANCE_RIP_AND_FINISH();
2098 IEM_MC_END();
2099 }
2100
2101}
2102
2103
2104/**
2105 * @opcode 0x10
2106 * @oppfx 0x66
2107 * @opcpuid sse2
2108 * @opgroup og_sse2_pcksclr_datamove
2109 * @opxcpttype 4UA
2110 * @optest op1=1 op2=2 -> op1=2
2111 * @optest op1=0 op2=-42 -> op1=-42
2112 */
2113FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2114{
2115 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if (IEM_IS_MODRM_REG_MODE(bRm))
2118 {
2119 /*
2120 * XMM128, XMM128.
2121 */
2122 IEM_MC_BEGIN(0, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2127 IEM_GET_MODRM_RM(pVCpu, bRm));
2128 IEM_MC_ADVANCE_RIP_AND_FINISH();
2129 IEM_MC_END();
2130 }
2131 else
2132 {
2133 /*
2134 * XMM128, [mem128].
2135 */
2136 IEM_MC_BEGIN(0, 0);
2137 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2139
2140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2144
2145 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2146 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2147
2148 IEM_MC_ADVANCE_RIP_AND_FINISH();
2149 IEM_MC_END();
2150 }
2151}
2152
2153
2154/**
2155 * @opcode 0x10
2156 * @oppfx 0xf3
2157 * @opcpuid sse
2158 * @opgroup og_sse_simdfp_datamove
2159 * @opxcpttype 5
2160 * @optest op1=1 op2=2 -> op1=2
2161 * @optest op1=0 op2=-22 -> op1=-22
2162 */
2163FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2164{
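    /* Note: the register form below only writes the low dword of the
       destination (bits 127:32 are left untouched), whereas the memory form
       zero-extends the loaded dword to 128 bits; hence the VssZx annotation. */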
2165 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2167 if (IEM_IS_MODRM_REG_MODE(bRm))
2168 {
2169 /*
2170 * XMM32, XMM32.
2171 */
2172 IEM_MC_BEGIN(0, 0);
2173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2174 IEM_MC_LOCAL(uint32_t, uSrc);
2175
2176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2178 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2179 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 else
2185 {
2186 /*
2187 * XMM128, [mem32].
2188 */
2189 IEM_MC_BEGIN(0, 0);
2190 IEM_MC_LOCAL(uint32_t, uSrc);
2191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2192
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2196 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2197
2198 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2199 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2200
2201 IEM_MC_ADVANCE_RIP_AND_FINISH();
2202 IEM_MC_END();
2203 }
2204}
2205
2206
2207/**
2208 * @opcode 0x10
2209 * @oppfx 0xf2
2210 * @opcpuid sse2
2211 * @opgroup og_sse2_pcksclr_datamove
2212 * @opxcpttype 5
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 */
2216FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2217{
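    /* Note: same asymmetry as movss above: the register form preserves bits
       127:64 of the destination, the memory form zero-extends to 128 bits. */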
2218 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 if (IEM_IS_MODRM_REG_MODE(bRm))
2221 {
2222 /*
2223 * XMM64, XMM64.
2224 */
2225 IEM_MC_BEGIN(0, 0);
2226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2227 IEM_MC_LOCAL(uint64_t, uSrc);
2228
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2232 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237 else
2238 {
2239 /*
2240 * XMM128, [mem64].
2241 */
2242 IEM_MC_BEGIN(0, 0);
2243 IEM_MC_LOCAL(uint64_t, uSrc);
2244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2245
2246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2248 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2250
2251 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2252 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2253
2254 IEM_MC_ADVANCE_RIP_AND_FINISH();
2255 IEM_MC_END();
2256 }
2257}
2258
2259
2260/**
2261 * @opcode 0x11
2262 * @oppfx none
2263 * @opcpuid sse
2264 * @opgroup og_sse_simdfp_datamove
2265 * @opxcpttype 4UA
2266 * @optest op1=1 op2=2 -> op1=2
2267 * @optest op1=0 op2=-42 -> op1=-42
2268 */
2269FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2270{
2271 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2273 if (IEM_IS_MODRM_REG_MODE(bRm))
2274 {
2275 /*
2276 * XMM128, XMM128.
2277 */
2278 IEM_MC_BEGIN(0, 0);
2279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2282 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2283 IEM_GET_MODRM_REG(pVCpu, bRm));
2284 IEM_MC_ADVANCE_RIP_AND_FINISH();
2285 IEM_MC_END();
2286 }
2287 else
2288 {
2289 /*
2290 * [mem128], XMM128.
2291 */
2292 IEM_MC_BEGIN(0, 0);
2293 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2295
2296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2300
2301 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2303
2304 IEM_MC_ADVANCE_RIP_AND_FINISH();
2305 IEM_MC_END();
2306 }
2307}
2308
2309
2310/**
2311 * @opcode 0x11
2312 * @oppfx 0x66
2313 * @opcpuid sse2
2314 * @opgroup og_sse2_pcksclr_datamove
2315 * @opxcpttype 4UA
2316 * @optest op1=1 op2=2 -> op1=2
2317 * @optest op1=0 op2=-42 -> op1=-42
2318 */
2319FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2320{
2321 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2323 if (IEM_IS_MODRM_REG_MODE(bRm))
2324 {
2325 /*
2326 * XMM128, XMM128.
2327 */
2328 IEM_MC_BEGIN(0, 0);
2329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2333 IEM_GET_MODRM_REG(pVCpu, bRm));
2334 IEM_MC_ADVANCE_RIP_AND_FINISH();
2335 IEM_MC_END();
2336 }
2337 else
2338 {
2339 /*
2340 * [mem128], XMM128.
2341 */
2342 IEM_MC_BEGIN(0, 0);
2343 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2345
2346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2350
2351 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2352 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2353
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 }
2357}
2358
2359
2360/**
2361 * @opcode 0x11
2362 * @oppfx 0xf3
2363 * @opcpuid sse
2364 * @opgroup og_sse_simdfp_datamove
2365 * @opxcpttype 5
2366 * @optest op1=1 op2=2 -> op1=2
2367 * @optest op1=0 op2=-22 -> op1=-22
2368 */
2369FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2370{
2371 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2373 if (IEM_IS_MODRM_REG_MODE(bRm))
2374 {
2375 /*
2376 * XMM32, XMM32.
2377 */
2378 IEM_MC_BEGIN(0, 0);
2379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2380 IEM_MC_LOCAL(uint32_t, uSrc);
2381
2382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2385 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390 else
2391 {
2392 /*
2393 * [mem32], XMM32.
2394 */
2395 IEM_MC_BEGIN(0, 0);
2396 IEM_MC_LOCAL(uint32_t, uSrc);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2403
2404 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2405 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2406
2407 IEM_MC_ADVANCE_RIP_AND_FINISH();
2408 IEM_MC_END();
2409 }
2410}
2411
2412
2413/**
2414 * @opcode 0x11
2415 * @oppfx 0xf2
2416 * @opcpuid sse2
2417 * @opgroup og_sse2_pcksclr_datamove
2418 * @opxcpttype 5
2419 * @optest op1=1 op2=2 -> op1=2
2420 * @optest op1=0 op2=-42 -> op1=-42
2421 */
2422FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2423{
2424 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2426 if (IEM_IS_MODRM_REG_MODE(bRm))
2427 {
2428 /*
2429 * XMM64, XMM64.
2430 */
2431 IEM_MC_BEGIN(0, 0);
2432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2433 IEM_MC_LOCAL(uint64_t, uSrc);
2434
2435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2437 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2438 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443 else
2444 {
2445 /*
2446 * [mem64], XMM64.
2447 */
2448 IEM_MC_BEGIN(0, 0);
2449 IEM_MC_LOCAL(uint64_t, uSrc);
2450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2451
2452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2456
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2459
2460 IEM_MC_ADVANCE_RIP_AND_FINISH();
2461 IEM_MC_END();
2462 }
2463}
2464
2465
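/** Opcode 0x0f 0x12 - movhlps Vq, UqHi (register form) and movlps Vq, Mq
 * (memory form); see the @opcode doc blocks in the two branches below. */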
2466FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2467{
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 if (IEM_IS_MODRM_REG_MODE(bRm))
2470 {
2471 /**
2472 * @opcode 0x12
2473 * @opcodesub 11 mr/reg
2474 * @oppfx none
2475 * @opcpuid sse
2476 * @opgroup og_sse_simdfp_datamove
2477 * @opxcpttype 5
2478 * @optest op1=1 op2=2 -> op1=2
2479 * @optest op1=0 op2=-42 -> op1=-42
2480 */
2481 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2482
2483 IEM_MC_BEGIN(0, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2489 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2490 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /**
2498 * @opdone
2499 * @opcode 0x12
2500 * @opcodesub !11 mr/reg
2501 * @oppfx none
2502 * @opcpuid sse
2503 * @opgroup og_sse_simdfp_datamove
2504 * @opxcpttype 5
2505 * @optest op1=1 op2=2 -> op1=2
2506 * @optest op1=0 op2=-42 -> op1=-42
2507 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2508 */
2509 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2510
2511 IEM_MC_BEGIN(0, 0);
2512 IEM_MC_LOCAL(uint64_t, uSrc);
2513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2514
2515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2519
2520 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2521 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2522
2523 IEM_MC_ADVANCE_RIP_AND_FINISH();
2524 IEM_MC_END();
2525 }
2526}
2527
2528
2529/**
2530 * @opcode 0x12
2531 * @opcodesub !11 mr/reg
2532 * @oppfx 0x66
2533 * @opcpuid sse2
2534 * @opgroup og_sse2_pcksclr_datamove
2535 * @opxcpttype 5
2536 * @optest op1=1 op2=2 -> op1=2
2537 * @optest op1=0 op2=-42 -> op1=-42
2538 */
2539FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2540{
2541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2542 if (IEM_IS_MODRM_MEM_MODE(bRm))
2543 {
2544 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2545
2546 IEM_MC_BEGIN(0, 0);
2547 IEM_MC_LOCAL(uint64_t, uSrc);
2548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2549
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2552 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2554
2555 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2556 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2557
2558 IEM_MC_ADVANCE_RIP_AND_FINISH();
2559 IEM_MC_END();
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 else
2573 IEMOP_RAISE_INVALID_OPCODE_RET();
2574}
2575
2576
2577/**
2578 * @opcode 0x12
2579 * @oppfx 0xf3
2580 * @opcpuid sse3
2581 * @opgroup og_sse3_pcksclr_datamove
2582 * @opxcpttype 4
2583 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2584 * op1=0x00000002000000020000000100000001
2585 */
2586FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2587{
2588 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2590 if (IEM_IS_MODRM_REG_MODE(bRm))
2591 {
2592 /*
2593 * XMM, XMM.
2594 */
2595 IEM_MC_BEGIN(0, 0);
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2597 IEM_MC_LOCAL(RTUINT128U, uSrc);
2598
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2606 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * XMM, [mem128].
2615 */
2616 IEM_MC_BEGIN(0, 0);
2617 IEM_MC_LOCAL(RTUINT128U, uSrc);
2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2627 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2628 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2629 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2630
2631 IEM_MC_ADVANCE_RIP_AND_FINISH();
2632 IEM_MC_END();
2633 }
2634}
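/*
 * Illustrative sketch of the movsldup shuffle implemented above: the even
 * dwords of the source are duplicated into adjacent pairs. This is not IEM
 * code and the helper name is made up; it is kept compiled out.
 */
#if 0
static void movsldupRef(uint32_t aDst[4], uint32_t const aSrc[4])
{
    uint32_t const uEven0 = aSrc[0];
    uint32_t const uEven2 = aSrc[2];
    aDst[0] = uEven0;   /* dst = { s0, s0, s2, s2 } */
    aDst[1] = uEven0;
    aDst[2] = uEven2;
    aDst[3] = uEven2;
}
#endif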
2635
2636
2637/**
2638 * @opcode 0x12
2639 * @oppfx 0xf2
2640 * @opcpuid sse3
2641 * @opgroup og_sse3_pcksclr_datamove
2642 * @opxcpttype 5
2643 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2644 * op1=0x22222222111111112222222211111111
2645 */
2646FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2647{
2648 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2650 if (IEM_IS_MODRM_REG_MODE(bRm))
2651 {
2652 /*
2653 * XMM128, XMM64.
2654 */
2655 IEM_MC_BEGIN(0, 0);
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2663 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2664 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP_AND_FINISH();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * XMM128, [mem64].
2673 */
2674 IEM_MC_BEGIN(0, 0);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_LOCAL(uint64_t, uSrc);
2677
2678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2684 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2685 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP_AND_FINISH();
2688 IEM_MC_END();
2689 }
2690}
2691
2692
2693/**
2694 * @opcode 0x13
2695 * @opcodesub !11 mr/reg
2696 * @oppfx none
2697 * @opcpuid sse
2698 * @opgroup og_sse_simdfp_datamove
2699 * @opxcpttype 5
2700 * @optest op1=1 op2=2 -> op1=2
2701 * @optest op1=0 op2=-42 -> op1=-42
2702 */
2703FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2704{
2705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2706 if (IEM_IS_MODRM_MEM_MODE(bRm))
2707 {
2708 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2709
2710 IEM_MC_BEGIN(0, 0);
2711 IEM_MC_LOCAL(uint64_t, uSrc);
2712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2713
2714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2718
2719 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2720 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2721
2722 IEM_MC_ADVANCE_RIP_AND_FINISH();
2723 IEM_MC_END();
2724 }
2725
2726 /**
2727 * @opdone
2728 * @opmnemonic ud0f13m3
2729 * @opcode 0x13
2730 * @opcodesub 11 mr/reg
2731 * @oppfx none
2732 * @opunused immediate
2733 * @opcpuid sse
2734 * @optest ->
2735 */
2736 else
2737 IEMOP_RAISE_INVALID_OPCODE_RET();
2738}
2739
2740
2741/**
2742 * @opcode 0x13
2743 * @opcodesub !11 mr/reg
2744 * @oppfx 0x66
2745 * @opcpuid sse2
2746 * @opgroup og_sse2_pcksclr_datamove
2747 * @opxcpttype 5
2748 * @optest op1=1 op2=2 -> op1=2
2749 * @optest op1=0 op2=-42 -> op1=-42
2750 */
2751FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2752{
2753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2754 if (IEM_IS_MODRM_MEM_MODE(bRm))
2755 {
2756 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2757
2758 IEM_MC_BEGIN(0, 0);
2759 IEM_MC_LOCAL(uint64_t, uSrc);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2766
2767 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2768 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2769
2770 IEM_MC_ADVANCE_RIP_AND_FINISH();
2771 IEM_MC_END();
2772 }
2773
2774 /**
2775 * @opdone
2776 * @opmnemonic ud660f13m3
2777 * @opcode 0x13
2778 * @opcodesub 11 mr/reg
2779 * @oppfx 0x66
2780 * @opunused immediate
2781 * @opcpuid sse
2782 * @optest ->
2783 */
2784 else
2785 IEMOP_RAISE_INVALID_OPCODE_RET();
2786}
2787
2788
2789/**
2790 * @opmnemonic udf30f13
2791 * @opcode 0x13
2792 * @oppfx 0xf3
2793 * @opunused intel-modrm
2794 * @opcpuid sse
2795 * @optest ->
2796 * @opdone
2797 */
2798
2799/**
2800 * @opmnemonic udf20f13
2801 * @opcode 0x13
2802 * @oppfx 0xf2
2803 * @opunused intel-modrm
2804 * @opcpuid sse
2805 * @optest ->
2806 * @opdone
2807 */
2808
2809/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2810FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2811{
2812 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2813 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2814}
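/*
 * Illustrative sketch of the low-unpack performed by the worker above:
 * dst = { d0, s0, d1, s1 }. This is not IEM code and the helper name is made
 * up; it is kept compiled out. Sources are read up front so that a fully
 * aliased dst/src pair (same XMM register) still gives the right result.
 */
#if 0
static void unpcklpsRef(uint32_t aDst[4], uint32_t const aSrc[4])
{
    uint32_t const uRes0 = aDst[0];
    uint32_t const uRes1 = aSrc[0];
    uint32_t const uRes2 = aDst[1];
    uint32_t const uRes3 = aSrc[1];
    aDst[0] = uRes0;
    aDst[1] = uRes1;
    aDst[2] = uRes2;
    aDst[3] = uRes3;
}
#endif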
2815
2816
2817/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2818FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2819{
2820 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2821 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2822}
2823
2824
2825/**
2826 * @opdone
2827 * @opmnemonic udf30f14
2828 * @opcode 0x14
2829 * @oppfx 0xf3
2830 * @opunused intel-modrm
2831 * @opcpuid sse
2832 * @optest ->
2833 * @opdone
2834 */
2835
2836/**
2837 * @opmnemonic udf20f14
2838 * @opcode 0x14
2839 * @oppfx 0xf2
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2847FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2848{
2849 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2850 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2851}
2852
2853
2854/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2855FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2859}
2860
2861
2862/* Opcode 0xf3 0x0f 0x15 - invalid */
2863/* Opcode 0xf2 0x0f 0x15 - invalid */
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f15
2868 * @opcode 0x15
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f15
2878 * @opcode 0x15
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
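/** Opcode 0x0f 0x16 - movlhps Vdq, Uq (register form) and movhps Vdq, Mq
 * (memory form); see the @opcode doc blocks in the two branches below. */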
2886FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2887{
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2889 if (IEM_IS_MODRM_REG_MODE(bRm))
2890 {
2891 /**
2892 * @opcode 0x16
2893 * @opcodesub 11 mr/reg
2894 * @oppfx none
2895 * @opcpuid sse
2896 * @opgroup og_sse_simdfp_datamove
2897 * @opxcpttype 5
2898 * @optest op1=1 op2=2 -> op1=2
2899 * @optest op1=0 op2=-42 -> op1=-42
2900 */
2901 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2902
2903 IEM_MC_BEGIN(0, 0);
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2905 IEM_MC_LOCAL(uint64_t, uSrc);
2906
2907 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2909 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2910 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2911
2912 IEM_MC_ADVANCE_RIP_AND_FINISH();
2913 IEM_MC_END();
2914 }
2915 else
2916 {
2917 /**
2918 * @opdone
2919 * @opcode 0x16
2920 * @opcodesub !11 mr/reg
2921 * @oppfx none
2922 * @opcpuid sse
2923 * @opgroup og_sse_simdfp_datamove
2924 * @opxcpttype 5
2925 * @optest op1=1 op2=2 -> op1=2
2926 * @optest op1=0 op2=-42 -> op1=-42
2927 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2928 */
2929 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2930
2931 IEM_MC_BEGIN(0, 0);
2932 IEM_MC_LOCAL(uint64_t, uSrc);
2933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2934
2935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2939
2940 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2941 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2942
2943 IEM_MC_ADVANCE_RIP_AND_FINISH();
2944 IEM_MC_END();
2945 }
2946}
2947
2948
2949/**
2950 * @opcode 0x16
2951 * @opcodesub !11 mr/reg
2952 * @oppfx 0x66
2953 * @opcpuid sse2
2954 * @opgroup og_sse2_pcksclr_datamove
2955 * @opxcpttype 5
2956 * @optest op1=1 op2=2 -> op1=2
2957 * @optest op1=0 op2=-42 -> op1=-42
2958 */
2959FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2960{
2961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2962 if (IEM_IS_MODRM_MEM_MODE(bRm))
2963 {
2964 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2965
2966 IEM_MC_BEGIN(0, 0);
2967 IEM_MC_LOCAL(uint64_t, uSrc);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2972 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2974
2975 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2976 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2977
2978 IEM_MC_ADVANCE_RIP_AND_FINISH();
2979 IEM_MC_END();
2980 }
2981
2982 /**
2983 * @opdone
2984 * @opmnemonic ud660f16m3
2985 * @opcode 0x16
2986 * @opcodesub 11 mr/reg
2987 * @oppfx 0x66
2988 * @opunused immediate
2989 * @opcpuid sse
2990 * @optest ->
2991 */
2992 else
2993 IEMOP_RAISE_INVALID_OPCODE_RET();
2994}
2995
2996
2997/**
2998 * @opcode 0x16
2999 * @oppfx 0xf3
3000 * @opcpuid sse3
3001 * @opgroup og_sse3_pcksclr_datamove
3002 * @opxcpttype 4
3003 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3004 * op1=0x00000002000000020000000100000001
3005 */
3006FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3007{
3008 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3010 if (IEM_IS_MODRM_REG_MODE(bRm))
3011 {
3012 /*
3013 * XMM128, XMM128.
3014 */
3015 IEM_MC_BEGIN(0, 0);
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3017 IEM_MC_LOCAL(RTUINT128U, uSrc);
3018
3019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3020 IEM_MC_PREPARE_SSE_USAGE();
3021
3022 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3023 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3024 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3025 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3026 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3027
3028 IEM_MC_ADVANCE_RIP_AND_FINISH();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /*
3034 * XMM128, [mem128].
3035 */
3036 IEM_MC_BEGIN(0, 0);
3037 IEM_MC_LOCAL(RTUINT128U, uSrc);
3038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3039
3040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3042 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3046 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3047 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3048 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3049 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3050
3051 IEM_MC_ADVANCE_RIP_AND_FINISH();
3052 IEM_MC_END();
3053 }
3054}
3055
3056/**
3057 * @opdone
3058 * @opmnemonic udf20f16
3059 * @opcode 0x16
3060 * @oppfx 0xf2
3061 * @opunused intel-modrm
3062 * @opcpuid sse
3063 * @optest ->
3064 * @opdone
3065 */
3066
3067
3068/**
3069 * @opcode 0x17
3070 * @opcodesub !11 mr/reg
3071 * @oppfx none
3072 * @opcpuid sse
3073 * @opgroup og_sse_simdfp_datamove
3074 * @opxcpttype 5
3075 * @optest op1=1 op2=2 -> op1=2
3076 * @optest op1=0 op2=-42 -> op1=-42
3077 */
3078FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3079{
3080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3081 if (IEM_IS_MODRM_MEM_MODE(bRm))
3082 {
3083 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3084
3085 IEM_MC_BEGIN(0, 0);
3086 IEM_MC_LOCAL(uint64_t, uSrc);
3087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3088
3089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3092 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3093
3094 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3096
3097 IEM_MC_ADVANCE_RIP_AND_FINISH();
3098 IEM_MC_END();
3099 }
3100
3101 /**
3102 * @opdone
3103 * @opmnemonic ud0f17m3
3104 * @opcode 0x17
3105 * @opcodesub 11 mr/reg
3106 * @oppfx none
3107 * @opunused immediate
3108 * @opcpuid sse
3109 * @optest ->
3110 */
3111 else
3112 IEMOP_RAISE_INVALID_OPCODE_RET();
3113}
3114
3115
3116/**
3117 * @opcode 0x17
3118 * @opcodesub !11 mr/reg
3119 * @oppfx 0x66
3120 * @opcpuid sse2
3121 * @opgroup og_sse2_pcksclr_datamove
3122 * @opxcpttype 5
3123 * @optest op1=1 op2=2 -> op1=2
3124 * @optest op1=0 op2=-42 -> op1=-42
3125 */
3126FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if (IEM_IS_MODRM_MEM_MODE(bRm))
3130 {
3131 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3132
3133 IEM_MC_BEGIN(0, 0);
3134 IEM_MC_LOCAL(uint64_t, uSrc);
3135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3136
3137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3139 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3141
3142 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3143 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3144
3145 IEM_MC_ADVANCE_RIP_AND_FINISH();
3146 IEM_MC_END();
3147 }
3148
3149 /**
3150 * @opdone
3151 * @opmnemonic ud660f17m3
3152 * @opcode 0x17
3153 * @opcodesub 11 mr/reg
3154 * @oppfx 0x66
3155 * @opunused immediate
3156 * @opcpuid sse
3157 * @optest ->
3158 */
3159 else
3160 IEMOP_RAISE_INVALID_OPCODE_RET();
3161}
3162
3163
3164/**
3165 * @opdone
3166 * @opmnemonic udf30f17
3167 * @opcode 0x17
3168 * @oppfx 0xf3
3169 * @opunused intel-modrm
3170 * @opcpuid sse
3171 * @optest ->
3172 * @opdone
3173 */
3174
3175/**
3176 * @opmnemonic udf20f17
3177 * @opcode 0x17
3178 * @oppfx 0xf2
3179 * @opunused intel-modrm
3180 * @opcpuid sse
3181 * @optest ->
3182 * @opdone
3183 */
3184
3185
3186/** Opcode 0x0f 0x18. */
3187FNIEMOP_DEF(iemOp_prefetch_Grp16)
3188{
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_MEM_MODE(bRm))
3191 {
3192 switch (IEM_GET_MODRM_REG_8(bRm))
3193 {
3194 case 4: /* Aliased to /0 for the time being according to AMD. */
3195 case 5: /* Aliased to /0 for the time being according to AMD. */
3196 case 6: /* Aliased to /0 for the time being according to AMD. */
3197 case 7: /* Aliased to /0 for the time being according to AMD. */
3198 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3199 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3200 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3201 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3203 }
3204
3205 IEM_MC_BEGIN(0, 0);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3209 /* Currently a NOP. */
3210 IEM_MC_NOREF(GCPtrEffSrc);
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 IEMOP_RAISE_INVALID_OPCODE_RET();
3216}
3217
3218
3219/** Opcode 0x0f 0x19..0x1f. */
3220FNIEMOP_DEF(iemOp_nop_Ev)
3221{
3222 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3224 if (IEM_IS_MODRM_REG_MODE(bRm))
3225 {
3226 IEM_MC_BEGIN(0, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ADVANCE_RIP_AND_FINISH();
3229 IEM_MC_END();
3230 }
3231 else
3232 {
3233 IEM_MC_BEGIN(0, 0);
3234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 /* Currently a NOP. */
3238 IEM_MC_NOREF(GCPtrEffSrc);
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/** Opcode 0x0f 0x20. */
3246FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3247{
3248 /* mod is ignored, as are operand-size overrides. */
3249/** @todo testcase: check memory encoding. */
3250 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3251 IEMOP_HLP_MIN_386();
3252 if (IEM_IS_64BIT_CODE(pVCpu))
3253 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3254 else
3255 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3256
3257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3258 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3259 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3260 {
3261 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3262 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3263 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3264 iCrReg |= 8;
3265 }
3266 switch (iCrReg)
3267 {
3268 case 0: case 2: case 3: case 4: case 8:
3269 break;
3270 default:
3271 IEMOP_RAISE_INVALID_OPCODE_RET();
3272 }
3273 IEMOP_HLP_DONE_DECODING();
3274
3275 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3276 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3277 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3278}
3279
3280
3281/** Opcode 0x0f 0x21. */
3282FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3283{
3284/** @todo testcase: check memory encoding. */
3285 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3286 IEMOP_HLP_MIN_386();
3287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3290 IEMOP_RAISE_INVALID_OPCODE_RET();
3291 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3292 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3293 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3294}
3295
3296
3297/** Opcode 0x0f 0x22. */
3298FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3299{
3300 /* mod is ignored, as are operand-size overrides. */
3301 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3302 IEMOP_HLP_MIN_386();
3303 if (IEM_IS_64BIT_CODE(pVCpu))
3304 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3305 else
3306 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3307
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3311 {
3312 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3313 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3314 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3315 iCrReg |= 8;
3316 }
3317 switch (iCrReg)
3318 {
3319 case 0: case 2: case 3: case 4: case 8:
3320 break;
3321 default:
3322 IEMOP_RAISE_INVALID_OPCODE_RET();
3323 }
3324 IEMOP_HLP_DONE_DECODING();
3325
3326 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
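    /* iCrReg is 0, 2, 3, 4 or 8 at this point, so (2 | 8) matches CR2, CR3 and
       CR8, i.e. the control registers that cannot affect the CPU mode. */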
3327 if (iCrReg & (2 | 8))
3328 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3329 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3330 else
3331 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3332 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3333}
3334
3335
3336/** Opcode 0x0f 0x23. */
3337FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3338{
3339 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3340 IEMOP_HLP_MIN_386();
3341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3344 IEMOP_RAISE_INVALID_OPCODE_RET();
3345 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3346 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x24. */
3351FNIEMOP_DEF(iemOp_mov_Rd_Td)
3352{
3353 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3358 IEMOP_RAISE_INVALID_OPCODE_RET();
3359 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3360 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3361}
3362
3363
3364/** Opcode 0x0f 0x26. */
3365FNIEMOP_DEF(iemOp_mov_Td_Rd)
3366{
3367 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3368 IEMOP_HLP_MIN_386();
3369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3371 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3372 IEMOP_RAISE_INVALID_OPCODE_RET();
3373 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3374}
3375
3376
3377/**
3378 * @opcode 0x28
3379 * @oppfx none
3380 * @opcpuid sse
3381 * @opgroup og_sse_simdfp_datamove
3382 * @opxcpttype 1
3383 * @optest op1=1 op2=2 -> op1=2
3384 * @optest op1=0 op2=-42 -> op1=-42
3385 */
3386FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3387{
3388 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3390 if (IEM_IS_MODRM_REG_MODE(bRm))
3391 {
3392 /*
3393 * Register, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3399 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3400 IEM_GET_MODRM_RM(pVCpu, bRm));
3401 IEM_MC_ADVANCE_RIP_AND_FINISH();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /*
3407 * Register, memory.
3408 */
3409 IEM_MC_BEGIN(0, 0);
3410 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3412
3413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417
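    /* Unlike movups, the aligned fetch below raises #GP(0) if the effective
       address is not 16-byte aligned. */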
3418 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3419 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3420
3421 IEM_MC_ADVANCE_RIP_AND_FINISH();
3422 IEM_MC_END();
3423 }
3424}
3425
3426/**
3427 * @opcode 0x28
3428 * @oppfx 66
3429 * @opcpuid sse2
3430 * @opgroup og_sse2_pcksclr_datamove
3431 * @opxcpttype 1
3432 * @optest op1=1 op2=2 -> op1=2
3433 * @optest op1=0 op2=-42 -> op1=-42
3434 */
3435FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3436{
3437 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if (IEM_IS_MODRM_REG_MODE(bRm))
3440 {
3441 /*
3442 * Register, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3449 IEM_GET_MODRM_RM(pVCpu, bRm));
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 /*
3456 * Register, memory.
3457 */
3458 IEM_MC_BEGIN(0, 0);
3459 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3466
3467 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3468 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3469
3470 IEM_MC_ADVANCE_RIP_AND_FINISH();
3471 IEM_MC_END();
3472 }
3473}
3474
3475/* Opcode 0xf3 0x0f 0x28 - invalid */
3476/* Opcode 0xf2 0x0f 0x28 - invalid */
3477
3478/**
3479 * @opcode 0x29
3480 * @oppfx none
3481 * @opcpuid sse
3482 * @opgroup og_sse_simdfp_datamove
3483 * @opxcpttype 1
3484 * @optest op1=1 op2=2 -> op1=2
3485 * @optest op1=0 op2=-42 -> op1=-42
3486 */
3487FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3488{
3489 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3491 if (IEM_IS_MODRM_REG_MODE(bRm))
3492 {
3493 /*
3494 * Register, register.
3495 */
3496 IEM_MC_BEGIN(0, 0);
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3498 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3500 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3501 IEM_GET_MODRM_REG(pVCpu, bRm));
3502 IEM_MC_ADVANCE_RIP_AND_FINISH();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /*
3508 * Memory, register.
3509 */
3510 IEM_MC_BEGIN(0, 0);
3511 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3518
3519 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3520 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3521
3522 IEM_MC_ADVANCE_RIP_AND_FINISH();
3523 IEM_MC_END();
3524 }
3525}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
3536FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3537{
3538 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * Register, register.
3544 */
3545 IEM_MC_BEGIN(0, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3549 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3550 IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * Memory, register.
3558 */
3559 IEM_MC_BEGIN(0, 0);
3560 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3562
3563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3567
3568 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3569 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3570
3571 IEM_MC_ADVANCE_RIP_AND_FINISH();
3572 IEM_MC_END();
3573 }
3574}
3575
3576/* Opcode 0xf3 0x0f 0x29 - invalid */
3577/* Opcode 0xf2 0x0f 0x29 - invalid */
3578
3579
3580/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3581FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3582{
3583 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 if (IEM_IS_MODRM_REG_MODE(bRm))
3586 {
3587 /*
3588 * XMM, MMX
3589 */
3590 IEM_MC_BEGIN(0, 0);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3592 IEM_MC_LOCAL(X86XMMREG, Dst);
3593 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3594 IEM_MC_ARG(uint64_t, u64Src, 1);
3595 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3597 IEM_MC_PREPARE_FPU_USAGE();
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3601 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3602
3603 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3604 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609 else
3610 {
3611 /*
3612 * XMM, [mem64]
3613 */
3614 IEM_MC_BEGIN(0, 0);
3615 IEM_MC_LOCAL(X86XMMREG, Dst);
3616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3617 IEM_MC_ARG(uint64_t, u64Src, 1);
3618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3619
3620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3624
3625 IEM_MC_PREPARE_FPU_USAGE();
3626
3627 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3628 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3629
3630 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3631 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3632
3633 IEM_MC_ADVANCE_RIP_AND_FINISH();
3634 IEM_MC_END();
3635 }
3636}
3637
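/*
 * A minimal C sketch (not the IEM worker itself) of the cvtpi2ps semantics
 * emulated above: both packed int32 values of the 64-bit source are
 * converted to single precision into the low quadword of the destination,
 * while the high quadword stays untouched, which is why the code fetches
 * the destination XMM register first.
 *
 *      static void cvtpi2psSketch(float aDst[4], uint64_t u64Src)
 *      {
 *          aDst[0] = (float)(int32_t)(u64Src & UINT32_MAX); // low dword
 *          aDst[1] = (float)(int32_t)(u64Src >> 32);        // high dword
 *          // aDst[2] and aDst[3] are left as-is.
 *      }
 *
 * Inexact conversions round according to MXCSR.RC. Note that only the
 * register form transitions the FPU to MMX mode; the memory form leaves
 * FTW alone, as the IEM_MC_FPU_TO_MMX_MODE difference above shows.
 */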
3638
3639/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3640FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3641{
3642    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3644 if (IEM_IS_MODRM_REG_MODE(bRm))
3645 {
3646 /*
3647 * XMM, MMX
3648 */
3649 IEM_MC_BEGIN(0, 0);
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3651 IEM_MC_LOCAL(X86XMMREG, Dst);
3652 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3653 IEM_MC_ARG(uint64_t, u64Src, 1);
3654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3655 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3656 IEM_MC_PREPARE_FPU_USAGE();
3657 IEM_MC_FPU_TO_MMX_MODE();
3658
3659 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3660
3661 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3662 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3663
3664 IEM_MC_ADVANCE_RIP_AND_FINISH();
3665 IEM_MC_END();
3666 }
3667 else
3668 {
3669 /*
3670 * XMM, [mem64]
3671 */
3672 IEM_MC_BEGIN(0, 0);
3673 IEM_MC_LOCAL(X86XMMREG, Dst);
3674 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3675 IEM_MC_ARG(uint64_t, u64Src, 1);
3676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3677
3678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3681 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3682 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3683
3684 /* Doesn't cause a transition to MMX mode. */
3685 IEM_MC_PREPARE_SSE_USAGE();
3686
3687 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3688 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3689
3690 IEM_MC_ADVANCE_RIP_AND_FINISH();
3691 IEM_MC_END();
3692 }
3693}
3694
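/*
 * The register and memory forms above differ deliberately: reading an MMX
 * source register transitions the FPU to MMX mode (IEM_MC_FPU_TO_MMX_MODE),
 * whereas the memory form only needs SSE state, hence the bare
 * IEM_MC_PREPARE_SSE_USAGE. A sketch of the conversion itself:
 *
 *      pDst->ar64[0] = (double)(int32_t)(u64Src & UINT32_MAX);
 *      pDst->ar64[1] = (double)(int32_t)(u64Src >> 32);
 *
 * (ar64 used loosely here for the two double lanes of an X86XMMREG.)
 * int32 to double is always exact, so MXCSR rounding never kicks in.
 */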
3695
3696/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3697FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3698{
3699 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3700
3701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3702 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3703 {
3704 if (IEM_IS_MODRM_REG_MODE(bRm))
3705 {
3706 /* XMM, greg64 */
3707 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3708 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3709 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3710 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3711
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3713 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3714 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3715
3716 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3717 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3718 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3719
3720 IEM_MC_ADVANCE_RIP_AND_FINISH();
3721 IEM_MC_END();
3722 }
3723 else
3724 {
3725 /* XMM, [mem64] */
3726 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3728 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3729 IEM_MC_LOCAL(int64_t, i64Src);
3730 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3731 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3732
3733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3735 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3736 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3737
3738 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3739 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3740 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3741
3742 IEM_MC_ADVANCE_RIP_AND_FINISH();
3743 IEM_MC_END();
3744 }
3745 }
3746 else
3747 {
3748 if (IEM_IS_MODRM_REG_MODE(bRm))
3749 {
3750            /* XMM, greg32 */
3751 IEM_MC_BEGIN(0, 0);
3752 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3753 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3754 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3755
3756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3757 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3758 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3759
3760 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3761 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3762 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3763
3764 IEM_MC_ADVANCE_RIP_AND_FINISH();
3765 IEM_MC_END();
3766 }
3767 else
3768 {
3769            /* XMM, [mem32] */
3770 IEM_MC_BEGIN(0, 0);
3771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3772 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3773 IEM_MC_LOCAL(int32_t, i32Src);
3774 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3775 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3776
3777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3779 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3780 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3781
3782 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3783 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3784 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3785
3786 IEM_MC_ADVANCE_RIP_AND_FINISH();
3787 IEM_MC_END();
3788 }
3789 }
3790}
3791
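/*
 * REX.W selects the source width in the function above: cvtsi2ss with
 * REX.W converts a 64-bit GPR/m64, without it a 32-bit GPR/m32, and both
 * can be inexact since float32 has only 24 mantissa bits. Roughly, as a
 * C sketch:
 *
 *      float r32 = fRexW ? (float)i64Src : (float)i32Src; // rounds per MXCSR.RC
 *
 * which is why the worker runs through IEM_MC_CALL_SSE_AIMPL_2 and thus,
 * by IEM convention, sees the guest FPU/SSE state for rounding control
 * and MXCSR exception flag accumulation.
 */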
3792
3793/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3794FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3795{
3796 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3797
3798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3800 {
3801 if (IEM_IS_MODRM_REG_MODE(bRm))
3802 {
3803 /* XMM, greg64 */
3804 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3805 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3806 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3807 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3808
3809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3810 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3811 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3812
3813 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3814 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3815 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3816
3817 IEM_MC_ADVANCE_RIP_AND_FINISH();
3818 IEM_MC_END();
3819 }
3820 else
3821 {
3822 /* XMM, [mem64] */
3823 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3825 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3826 IEM_MC_LOCAL(int64_t, i64Src);
3827 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3828 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3829
3830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3832 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3833 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3834
3835 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3836 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3837 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3838
3839 IEM_MC_ADVANCE_RIP_AND_FINISH();
3840 IEM_MC_END();
3841 }
3842 }
3843 else
3844 {
3845 if (IEM_IS_MODRM_REG_MODE(bRm))
3846 {
3847 /* XMM, greg32 */
3848 IEM_MC_BEGIN(0, 0);
3849 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3850 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3851 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3852
3853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3854 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3855 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3856
3857 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3858 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3859 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3860
3861 IEM_MC_ADVANCE_RIP_AND_FINISH();
3862 IEM_MC_END();
3863 }
3864 else
3865 {
3866 /* XMM, [mem32] */
3867 IEM_MC_BEGIN(0, 0);
3868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3869 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3870 IEM_MC_LOCAL(int32_t, i32Src);
3871 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3872 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3873
3874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3876 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3877 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3878
3879 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3880 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3881 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3882
3883 IEM_MC_ADVANCE_RIP_AND_FINISH();
3884 IEM_MC_END();
3885 }
3886 }
3887}
3888
3889
3890/**
3891 * @opcode 0x2b
3892 * @opcodesub !11 mr/reg
3893 * @oppfx none
3894 * @opcpuid sse
3895 * @opgroup og_sse1_cachect
3896 * @opxcpttype 1
3897 * @optest op1=1 op2=2 -> op1=2
3898 * @optest op1=0 op2=-42 -> op1=-42
3899 */
3900FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3901{
3902 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3904 if (IEM_IS_MODRM_MEM_MODE(bRm))
3905 {
3906 /*
3907     * Memory, register.
3908 */
3909 IEM_MC_BEGIN(0, 0);
3910 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3912
3913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3915 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3916 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3917
3918 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3919 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3920
3921 IEM_MC_ADVANCE_RIP_AND_FINISH();
3922 IEM_MC_END();
3923 }
3924 /* The register, register encoding is invalid. */
3925 else
3926 IEMOP_RAISE_INVALID_OPCODE_RET();
3927}
3928
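/*
 * movntps/movntpd only exist with a memory destination, which is why the
 * register encoding falls through to #UD above. The non-temporal hint has
 * no architecturally visible effect, so IEM emulates it as a plain aligned
 * 16-byte store (misalignment still yields #GP via
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE). Typical guest-side usage through the
 * standard intrinsic, for reference:
 *
 *      #include <xmmintrin.h>
 *      void StoreNonTemporal(float *pf16Aligned, __m128 v)
 *      {
 *          _mm_stream_ps(pf16Aligned, v); // compiles to movntps [mem], xmm
 *      }
 */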
3929/**
3930 * @opcode 0x2b
3931 * @opcodesub !11 mr/reg
3932 * @oppfx 0x66
3933 * @opcpuid sse2
3934 * @opgroup og_sse2_cachect
3935 * @opxcpttype 1
3936 * @optest op1=1 op2=2 -> op1=2
3937 * @optest op1=0 op2=-42 -> op1=-42
3938 */
3939FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3940{
3941 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3943 if (IEM_IS_MODRM_MEM_MODE(bRm))
3944 {
3945 /*
3946     * Memory, register.
3947 */
3948 IEM_MC_BEGIN(0, 0);
3949 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3951
3952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3955 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3956
3957 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3958 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3959
3960 IEM_MC_ADVANCE_RIP_AND_FINISH();
3961 IEM_MC_END();
3962 }
3963 /* The register, register encoding is invalid. */
3964 else
3965 IEMOP_RAISE_INVALID_OPCODE_RET();
3966}
3967/* Opcode 0xf3 0x0f 0x2b - invalid */
3968/* Opcode 0xf2 0x0f 0x2b - invalid */
3969
3970
3971/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3972FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3973{
3974 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3976 if (IEM_IS_MODRM_REG_MODE(bRm))
3977 {
3978 /*
3979 * Register, register.
3980 */
3981 IEM_MC_BEGIN(0, 0);
3982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3983 IEM_MC_LOCAL(uint64_t, u64Dst);
3984 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3985 IEM_MC_ARG(uint64_t, u64Src, 1);
3986 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3987 IEM_MC_PREPARE_FPU_USAGE();
3988 IEM_MC_FPU_TO_MMX_MODE();
3989
3990 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3991
3992 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3993 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3994
3995 IEM_MC_ADVANCE_RIP_AND_FINISH();
3996 IEM_MC_END();
3997 }
3998 else
3999 {
4000 /*
4001 * Register, memory.
4002 */
4003 IEM_MC_BEGIN(0, 0);
4004 IEM_MC_LOCAL(uint64_t, u64Dst);
4005 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4006 IEM_MC_ARG(uint64_t, u64Src, 1);
4007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4008
4009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4012 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4013
4014 IEM_MC_PREPARE_FPU_USAGE();
4015 IEM_MC_FPU_TO_MMX_MODE();
4016
4017 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
4018 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4019
4020 IEM_MC_ADVANCE_RIP_AND_FINISH();
4021 IEM_MC_END();
4022 }
4023}
4024
4025
4026/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4027FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4028{
4029 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4031 if (IEM_IS_MODRM_REG_MODE(bRm))
4032 {
4033 /*
4034 * Register, register.
4035 */
4036 IEM_MC_BEGIN(0, 0);
4037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4038 IEM_MC_LOCAL(uint64_t, u64Dst);
4039 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4040 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4041 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4042 IEM_MC_PREPARE_FPU_USAGE();
4043 IEM_MC_FPU_TO_MMX_MODE();
4044
4045 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4046
4047 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4048 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4049
4050 IEM_MC_ADVANCE_RIP_AND_FINISH();
4051 IEM_MC_END();
4052 }
4053 else
4054 {
4055 /*
4056 * Register, memory.
4057 */
4058 IEM_MC_BEGIN(0, 0);
4059 IEM_MC_LOCAL(uint64_t, u64Dst);
4060 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4061 IEM_MC_LOCAL(X86XMMREG, uSrc);
4062 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4064
4065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4067 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4068 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4069
4070 IEM_MC_PREPARE_FPU_USAGE();
4071 IEM_MC_FPU_TO_MMX_MODE();
4072
4073 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4074 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4075
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 IEM_MC_END();
4078 }
4079}
4080
4081
4082/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4083FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4084{
4085    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4086
4087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4088 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4089 {
4090 if (IEM_IS_MODRM_REG_MODE(bRm))
4091 {
4092 /* greg64, XMM */
4093 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4094 IEM_MC_LOCAL(int64_t, i64Dst);
4095 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4096 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4097
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4099 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4100 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4101
4102 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4103 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4104 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4105
4106 IEM_MC_ADVANCE_RIP_AND_FINISH();
4107 IEM_MC_END();
4108 }
4109 else
4110 {
4111            /* greg64, [mem32] */
4112 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4114 IEM_MC_LOCAL(int64_t, i64Dst);
4115 IEM_MC_LOCAL(uint32_t, u32Src);
4116 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4117 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4118
4119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4121 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4122 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4123
4124 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4125 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4126 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4127
4128 IEM_MC_ADVANCE_RIP_AND_FINISH();
4129 IEM_MC_END();
4130 }
4131 }
4132 else
4133 {
4134 if (IEM_IS_MODRM_REG_MODE(bRm))
4135 {
4136 /* greg, XMM */
4137 IEM_MC_BEGIN(0, 0);
4138 IEM_MC_LOCAL(int32_t, i32Dst);
4139 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4140 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4141
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4144 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4145
4146 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4147 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4148 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4149
4150 IEM_MC_ADVANCE_RIP_AND_FINISH();
4151 IEM_MC_END();
4152 }
4153 else
4154 {
4155 /* greg, [mem] */
4156 IEM_MC_BEGIN(0, 0);
4157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4158 IEM_MC_LOCAL(int32_t, i32Dst);
4159 IEM_MC_LOCAL(uint32_t, u32Src);
4160 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4161 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4162
4163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4165 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4166 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4167
4168 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4169 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4170 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4171
4172 IEM_MC_ADVANCE_RIP_AND_FINISH();
4173 IEM_MC_END();
4174 }
4175 }
4176}
4177
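/*
 * The extra 't' means truncate: cvttss2si always rounds toward zero,
 * ignoring MXCSR.RC, while cvtss2si (0x0f 0x2d below) honours the
 * configured rounding mode. In C terms, roughly:
 *
 *      int32_t i32Trunc   = (int32_t)r32Src;              // cvttss2si
 *      int32_t i32Rounded = (int32_t)nearbyintf(r32Src);  // cvtss2si, under the
 *                                                         // current rounding mode
 *
 * (nearbyintf from <math.h>.) NaN and out-of-range inputs produce the
 * integer indefinite value (INT32_MIN resp. INT64_MIN) and set the #I
 * flag in MXCSR.
 */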
4178
4179/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4180FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4181{
4182 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4183
4184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4185 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4186 {
4187 if (IEM_IS_MODRM_REG_MODE(bRm))
4188 {
4189 /* greg64, XMM */
4190 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4191 IEM_MC_LOCAL(int64_t, i64Dst);
4192 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4193 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4194
4195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4196 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4197 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4198
4199 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4200 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4201 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4202
4203 IEM_MC_ADVANCE_RIP_AND_FINISH();
4204 IEM_MC_END();
4205 }
4206 else
4207 {
4208 /* greg64, [mem64] */
4209 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4211 IEM_MC_LOCAL(int64_t, i64Dst);
4212 IEM_MC_LOCAL(uint64_t, u64Src);
4213 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4214 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4215
4216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4219 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4220
4221 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4222 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4223 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4224
4225 IEM_MC_ADVANCE_RIP_AND_FINISH();
4226 IEM_MC_END();
4227 }
4228 }
4229 else
4230 {
4231 if (IEM_IS_MODRM_REG_MODE(bRm))
4232 {
4233            /* greg32, XMM */
4234 IEM_MC_BEGIN(0, 0);
4235 IEM_MC_LOCAL(int32_t, i32Dst);
4236 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4237 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4238
4239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4240 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4241 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4242
4243 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4244 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4245 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4246
4247 IEM_MC_ADVANCE_RIP_AND_FINISH();
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252            /* greg32, [mem64] */
4253 IEM_MC_BEGIN(0, 0);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4255 IEM_MC_LOCAL(int32_t, i32Dst);
4256 IEM_MC_LOCAL(uint64_t, u64Src);
4257 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4258 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4259
4260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4262 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4263 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4264
4265 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4266 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4267 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4268
4269 IEM_MC_ADVANCE_RIP_AND_FINISH();
4270 IEM_MC_END();
4271 }
4272 }
4273}
4274
4275
4276/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4277FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4278{
4279 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4281 if (IEM_IS_MODRM_REG_MODE(bRm))
4282 {
4283 /*
4284 * Register, register.
4285 */
4286 IEM_MC_BEGIN(0, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4288 IEM_MC_LOCAL(uint64_t, u64Dst);
4289 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4290 IEM_MC_ARG(uint64_t, u64Src, 1);
4291
4292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4293 IEM_MC_PREPARE_FPU_USAGE();
4294 IEM_MC_FPU_TO_MMX_MODE();
4295
4296 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4297
4298 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4299 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4300
4301 IEM_MC_ADVANCE_RIP_AND_FINISH();
4302 IEM_MC_END();
4303 }
4304 else
4305 {
4306 /*
4307 * Register, memory.
4308 */
4309 IEM_MC_BEGIN(0, 0);
4310 IEM_MC_LOCAL(uint64_t, u64Dst);
4311 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4312 IEM_MC_ARG(uint64_t, u64Src, 1);
4313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4314
4315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4318 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4319
4320 IEM_MC_PREPARE_FPU_USAGE();
4321 IEM_MC_FPU_TO_MMX_MODE();
4322
4323 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4324 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4325
4326 IEM_MC_ADVANCE_RIP_AND_FINISH();
4327 IEM_MC_END();
4328 }
4329}
4330
4331
4332/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4333FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4334{
4335 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4337 if (IEM_IS_MODRM_REG_MODE(bRm))
4338 {
4339 /*
4340 * Register, register.
4341 */
4342 IEM_MC_BEGIN(0, 0);
4343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4344 IEM_MC_LOCAL(uint64_t, u64Dst);
4345 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4346 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4347
4348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4349 IEM_MC_PREPARE_FPU_USAGE();
4350 IEM_MC_FPU_TO_MMX_MODE();
4351
4352 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4353
4354 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4355 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4356
4357 IEM_MC_ADVANCE_RIP_AND_FINISH();
4358 IEM_MC_END();
4359 }
4360 else
4361 {
4362 /*
4363 * Register, memory.
4364 */
4365 IEM_MC_BEGIN(0, 0);
4366 IEM_MC_LOCAL(uint64_t, u64Dst);
4367 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4368 IEM_MC_LOCAL(X86XMMREG, uSrc);
4369 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4371
4372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4374 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4375 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4376
4377 IEM_MC_PREPARE_FPU_USAGE();
4378 IEM_MC_FPU_TO_MMX_MODE();
4379
4380 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4381 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4382
4383 IEM_MC_ADVANCE_RIP_AND_FINISH();
4384 IEM_MC_END();
4385 }
4386}
4387
4388
4389/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4390FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4391{
4392    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4393
4394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4395 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4396 {
4397 if (IEM_IS_MODRM_REG_MODE(bRm))
4398 {
4399 /* greg64, XMM */
4400 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4401 IEM_MC_LOCAL(int64_t, i64Dst);
4402 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4403 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4404
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4406 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4407 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4408
4409 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4410 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4411 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4412
4413 IEM_MC_ADVANCE_RIP_AND_FINISH();
4414 IEM_MC_END();
4415 }
4416 else
4417 {
4418            /* greg64, [mem32] */
4419 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4421 IEM_MC_LOCAL(int64_t, i64Dst);
4422 IEM_MC_LOCAL(uint32_t, u32Src);
4423 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4424 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4425
4426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4429 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4430
4431 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4432 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4433 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4434
4435 IEM_MC_ADVANCE_RIP_AND_FINISH();
4436 IEM_MC_END();
4437 }
4438 }
4439 else
4440 {
4441 if (IEM_IS_MODRM_REG_MODE(bRm))
4442 {
4443 /* greg, XMM */
4444 IEM_MC_BEGIN(0, 0);
4445 IEM_MC_LOCAL(int32_t, i32Dst);
4446 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4447 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4448
4449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4451 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4452
4453 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4454 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4455 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4456
4457 IEM_MC_ADVANCE_RIP_AND_FINISH();
4458 IEM_MC_END();
4459 }
4460 else
4461 {
4462 /* greg, [mem] */
4463 IEM_MC_BEGIN(0, 0);
4464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4465 IEM_MC_LOCAL(int32_t, i32Dst);
4466 IEM_MC_LOCAL(uint32_t, u32Src);
4467 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4468 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4469
4470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4472 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4473 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4474
4475 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4476 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4477 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4478
4479 IEM_MC_ADVANCE_RIP_AND_FINISH();
4480 IEM_MC_END();
4481 }
4482 }
4483}
4484
4485
4486/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4487FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4488{
4489 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4490
4491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4492 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4493 {
4494 if (IEM_IS_MODRM_REG_MODE(bRm))
4495 {
4496 /* greg64, XMM */
4497 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4498 IEM_MC_LOCAL(int64_t, i64Dst);
4499 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4500 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4501
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4504 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4505
4506 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4507 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4508 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4509
4510 IEM_MC_ADVANCE_RIP_AND_FINISH();
4511 IEM_MC_END();
4512 }
4513 else
4514 {
4515 /* greg64, [mem64] */
4516 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4518 IEM_MC_LOCAL(int64_t, i64Dst);
4519 IEM_MC_LOCAL(uint64_t, u64Src);
4520 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4521 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4522
4523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4525 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4526            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4527
4528 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4529 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4530 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4531
4532 IEM_MC_ADVANCE_RIP_AND_FINISH();
4533 IEM_MC_END();
4534 }
4535 }
4536 else
4537 {
4538 if (IEM_IS_MODRM_REG_MODE(bRm))
4539 {
4540 /* greg32, XMM */
4541 IEM_MC_BEGIN(0, 0);
4542 IEM_MC_LOCAL(int32_t, i32Dst);
4543 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4544 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4545
4546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4548 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4549
4550 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4551 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4552 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4553
4554 IEM_MC_ADVANCE_RIP_AND_FINISH();
4555 IEM_MC_END();
4556 }
4557 else
4558 {
4559 /* greg32, [mem64] */
4560 IEM_MC_BEGIN(0, 0);
4561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4562 IEM_MC_LOCAL(int32_t, i32Dst);
4563 IEM_MC_LOCAL(uint64_t, u64Src);
4564 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4565 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4566
4567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4569 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4570 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4571
4572 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4573 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4574 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4575
4576 IEM_MC_ADVANCE_RIP_AND_FINISH();
4577 IEM_MC_END();
4578 }
4579 }
4580}
4581
4582
4583/**
4584 * @opcode 0x2e
4585 * @oppfx none
4586 * @opflmodify cf,pf,af,zf,sf,of
4587 * @opflclear af,sf,of
4588 */
4589FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4590{
4591 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4593 if (IEM_IS_MODRM_REG_MODE(bRm))
4594 {
4595 /*
4596 * Register, register.
4597 */
4598 IEM_MC_BEGIN(0, 0);
4599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4600 IEM_MC_LOCAL(uint32_t, fEFlags);
4601 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4602 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4603 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4604 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4605 IEM_MC_PREPARE_SSE_USAGE();
4606 IEM_MC_FETCH_EFLAGS(fEFlags);
4607 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4608 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4609 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4610 IEM_MC_COMMIT_EFLAGS(fEFlags);
4611
4612 IEM_MC_ADVANCE_RIP_AND_FINISH();
4613 IEM_MC_END();
4614 }
4615 else
4616 {
4617 /*
4618 * Register, memory.
4619 */
4620 IEM_MC_BEGIN(0, 0);
4621 IEM_MC_LOCAL(uint32_t, fEFlags);
4622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4623 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4624 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4626
4627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4630 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4631
4632 IEM_MC_PREPARE_SSE_USAGE();
4633 IEM_MC_FETCH_EFLAGS(fEFlags);
4634 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4635 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4636 IEM_MC_COMMIT_EFLAGS(fEFlags);
4637
4638 IEM_MC_ADVANCE_RIP_AND_FINISH();
4639 IEM_MC_END();
4640 }
4641}
4642
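/*
 * ucomiss reports the comparison solely through EFLAGS, matching the
 * @opflmodify/@opflclear annotations above; the architectural mapping is:
 *
 *      unordered (NaN):  ZF=1 PF=1 CF=1
 *      src1 > src2:      ZF=0 PF=0 CF=0
 *      src1 < src2:      ZF=0 PF=0 CF=1
 *      src1 == src2:     ZF=1 PF=0 CF=0
 *
 * with AF, SF and OF cleared. The unordered variant raises #I only for
 * signalling NaNs; comiss/comisd (0x0f 0x2f below) raise it for any NaN.
 */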
4643
4644/**
4645 * @opcode 0x2e
4646 * @oppfx 0x66
4647 * @opflmodify cf,pf,af,zf,sf,of
4648 * @opflclear af,sf,of
4649 */
4650FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4651{
4652 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654 if (IEM_IS_MODRM_REG_MODE(bRm))
4655 {
4656 /*
4657 * Register, register.
4658 */
4659 IEM_MC_BEGIN(0, 0);
4660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4661 IEM_MC_LOCAL(uint32_t, fEFlags);
4662 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4663 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4664 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4665 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4666 IEM_MC_PREPARE_SSE_USAGE();
4667 IEM_MC_FETCH_EFLAGS(fEFlags);
4668 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4669 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4670 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4671 IEM_MC_COMMIT_EFLAGS(fEFlags);
4672
4673 IEM_MC_ADVANCE_RIP_AND_FINISH();
4674 IEM_MC_END();
4675 }
4676 else
4677 {
4678 /*
4679 * Register, memory.
4680 */
4681 IEM_MC_BEGIN(0, 0);
4682 IEM_MC_LOCAL(uint32_t, fEFlags);
4683 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4684 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4685 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4687
4688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4690 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4691 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4692
4693 IEM_MC_PREPARE_SSE_USAGE();
4694 IEM_MC_FETCH_EFLAGS(fEFlags);
4695 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4696 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4697 IEM_MC_COMMIT_EFLAGS(fEFlags);
4698
4699 IEM_MC_ADVANCE_RIP_AND_FINISH();
4700 IEM_MC_END();
4701 }
4702}
4703
4704
4705/* Opcode 0xf3 0x0f 0x2e - invalid */
4706/* Opcode 0xf2 0x0f 0x2e - invalid */
4707
4708
4709/**
4710 * @opcode 0x2f
4711 * @oppfx none
4712 * @opflmodify cf,pf,af,zf,sf,of
4713 * @opflclear af,sf,of
4714 */
4715FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4716{
4717 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4719 if (IEM_IS_MODRM_REG_MODE(bRm))
4720 {
4721 /*
4722 * Register, register.
4723 */
4724 IEM_MC_BEGIN(0, 0);
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4726 IEM_MC_LOCAL(uint32_t, fEFlags);
4727 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4728 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4729 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4730 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4731 IEM_MC_PREPARE_SSE_USAGE();
4732 IEM_MC_FETCH_EFLAGS(fEFlags);
4733 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4734 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4735 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4736 IEM_MC_COMMIT_EFLAGS(fEFlags);
4737
4738 IEM_MC_ADVANCE_RIP_AND_FINISH();
4739 IEM_MC_END();
4740 }
4741 else
4742 {
4743 /*
4744 * Register, memory.
4745 */
4746 IEM_MC_BEGIN(0, 0);
4747 IEM_MC_LOCAL(uint32_t, fEFlags);
4748 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4749 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4750 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4752
4753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4755 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4756 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4757
4758 IEM_MC_PREPARE_SSE_USAGE();
4759 IEM_MC_FETCH_EFLAGS(fEFlags);
4760 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4761 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4762 IEM_MC_COMMIT_EFLAGS(fEFlags);
4763
4764 IEM_MC_ADVANCE_RIP_AND_FINISH();
4765 IEM_MC_END();
4766 }
4767}
4768
4769
4770/**
4771 * @opcode 0x2f
4772 * @oppfx 0x66
4773 * @opflmodify cf,pf,af,zf,sf,of
4774 * @opflclear af,sf,of
4775 */
4776FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4777{
4778 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4780 if (IEM_IS_MODRM_REG_MODE(bRm))
4781 {
4782 /*
4783 * Register, register.
4784 */
4785 IEM_MC_BEGIN(0, 0);
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4787 IEM_MC_LOCAL(uint32_t, fEFlags);
4788 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4789 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4790 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4791 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4792 IEM_MC_PREPARE_SSE_USAGE();
4793 IEM_MC_FETCH_EFLAGS(fEFlags);
4794 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4795 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4796 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4797 IEM_MC_COMMIT_EFLAGS(fEFlags);
4798
4799 IEM_MC_ADVANCE_RIP_AND_FINISH();
4800 IEM_MC_END();
4801 }
4802 else
4803 {
4804 /*
4805 * Register, memory.
4806 */
4807 IEM_MC_BEGIN(0, 0);
4808 IEM_MC_LOCAL(uint32_t, fEFlags);
4809 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4810 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4811 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4813
4814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4816 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4817 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4818
4819 IEM_MC_PREPARE_SSE_USAGE();
4820 IEM_MC_FETCH_EFLAGS(fEFlags);
4821 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4823 IEM_MC_COMMIT_EFLAGS(fEFlags);
4824
4825 IEM_MC_ADVANCE_RIP_AND_FINISH();
4826 IEM_MC_END();
4827 }
4828}
4829
4830
4831/* Opcode 0xf3 0x0f 0x2f - invalid */
4832/* Opcode 0xf2 0x0f 0x2f - invalid */
4833
4834/** Opcode 0x0f 0x30. */
4835FNIEMOP_DEF(iemOp_wrmsr)
4836{
4837 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4840}
4841
4842
4843/** Opcode 0x0f 0x31. */
4844FNIEMOP_DEF(iemOp_rdtsc)
4845{
4846 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4848 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4849 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4850 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4851 iemCImpl_rdtsc);
4852}
4853
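/*
 * rdtsc (and rdmsr/rdpmc below) return their 64-bit result split across
 * EDX:EAX; the RT_BIT_64(kIemNativeGstReg_GprFirst + ...) mask passed to
 * IEM_MC_DEFER_TO_CIMPL_0_RET names the guest registers the deferred C
 * implementation dirties, so the native recompiler flushes any shadow
 * copies. The guest-visible contract, as a GCC/Clang inline-assembly
 * sketch:
 *
 *      uint32_t uLo, uHi;
 *      __asm__ __volatile__("rdtsc" : "=a" (uLo), "=d" (uHi));
 *      uint64_t const uTsc = ((uint64_t)uHi << 32) | uLo;
 */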
4854
4855/** Opcode 0x0f 0x32. */
4856FNIEMOP_DEF(iemOp_rdmsr)
4857{
4858 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4860 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4861 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4862 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4863 iemCImpl_rdmsr);
4864}
4865
4866
4867/** Opcode 0x0f 0x33. */
4868FNIEMOP_DEF(iemOp_rdpmc)
4869{
4870 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4873 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4874 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4875 iemCImpl_rdpmc);
4876}
4877
4878
4879/** Opcode 0x0f 0x34. */
4880FNIEMOP_DEF(iemOp_sysenter)
4881{
4882 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4884 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4885 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4886 iemCImpl_sysenter);
4887}
4888
4889/** Opcode 0x0f 0x35. */
4890FNIEMOP_DEF(iemOp_sysexit)
4891{
4892 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4895 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4896 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4897}
4898
4899/** Opcode 0x0f 0x37. */
4900FNIEMOP_STUB(iemOp_getsec);
4901
4902
4903/** Opcode 0x0f 0x38. */
4904FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4905{
4906#ifdef IEM_WITH_THREE_0F_38
4907 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4908 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4909#else
4910 IEMOP_BITCH_ABOUT_STUB();
4911 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4912#endif
4913}
4914
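/*
 * Both three-byte maps are laid out with four function pointers per opcode
 * byte, indexed by the mandatory prefix recorded in idxPrefix during
 * decoding (by the usual IEM table convention: 0 = none, 1 = 0x66,
 * 2 = 0xf3, 3 = 0xf2). For example, 66 0F 38 00 (pshufb Vx,Wx) resolves
 * to:
 *
 *      g_apfnThreeByte0f38[0x00 * 4 + 1]
 */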
4915
4916/** Opcode 0x0f 0x3a. */
4917FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4918{
4919#ifdef IEM_WITH_THREE_0F_3A
4920 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4921 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4922#else
4923 IEMOP_BITCH_ABOUT_STUB();
4924 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4925#endif
4926}
4927
4928
4929/**
4930 * Implements a conditional move.
4931 *
4932 * Wish there was an obvious way to do this where we could share and reduce
4933 * code bloat.
4934 *
4935 * @param a_Cnd The conditional "microcode" operation.
4936 */
4937#define CMOV_X(a_Cnd) \
4938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4939 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4940 { \
4941 switch (pVCpu->iem.s.enmEffOpSize) \
4942 { \
4943 case IEMMODE_16BIT: \
4944 IEM_MC_BEGIN(0, 0); \
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4946 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4947 a_Cnd { \
4948 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4949 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4950 } IEM_MC_ENDIF(); \
4951 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4952 IEM_MC_END(); \
4953 break; \
4954 \
4955 case IEMMODE_32BIT: \
4956 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4958 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4959 a_Cnd { \
4960 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4961 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4962 } IEM_MC_ELSE() { \
4963 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4964 } IEM_MC_ENDIF(); \
4965 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4966 IEM_MC_END(); \
4967 break; \
4968 \
4969 case IEMMODE_64BIT: \
4970 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4972 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4973 a_Cnd { \
4974 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4975 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4976 } IEM_MC_ENDIF(); \
4977 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4978 IEM_MC_END(); \
4979 break; \
4980 \
4981 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4982 } \
4983 } \
4984 else \
4985 { \
4986 switch (pVCpu->iem.s.enmEffOpSize) \
4987 { \
4988 case IEMMODE_16BIT: \
4989 IEM_MC_BEGIN(0, 0); \
4990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4991 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4994 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4995 a_Cnd { \
4996 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4997 } IEM_MC_ENDIF(); \
4998 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4999 IEM_MC_END(); \
5000 break; \
5001 \
5002 case IEMMODE_32BIT: \
5003 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5005 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5008 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5009 a_Cnd { \
5010 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5011 } IEM_MC_ELSE() { \
5012 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5013 } IEM_MC_ENDIF(); \
5014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5015 IEM_MC_END(); \
5016 break; \
5017 \
5018 case IEMMODE_64BIT: \
5019 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5021 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5024 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5025 a_Cnd { \
5026 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5027 } IEM_MC_ENDIF(); \
5028 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5029 IEM_MC_END(); \
5030 break; \
5031 \
5032 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5033 } \
5034 } do {} while (0)
5035
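/*
 * Each cmov opcode below instantiates CMOV_X with its condition; e.g. for
 * cmovo with a 32-bit register operand the interesting part expands to:
 *
 *      IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
 *          IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 *          IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
 *      } IEM_MC_ELSE() {
 *          IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
 *      } IEM_MC_ENDIF();
 *
 * Only the 32-bit cases carry the IEM_MC_ELSE: in 64-bit mode a 32-bit
 * cmov zero-extends the destination even when the condition is false, so
 * the high half must be cleared on both paths.
 */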
5036
5037
5038/**
5039 * @opcode 0x40
5040 * @opfltest of
5041 */
5042FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5043{
5044 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5045 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5046}
5047
5048
5049/**
5050 * @opcode 0x41
5051 * @opfltest of
5052 */
5053FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5054{
5055 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5056 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5057}
5058
5059
5060/**
5061 * @opcode 0x42
5062 * @opfltest cf
5063 */
5064FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5065{
5066 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5067 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5068}
5069
5070
5071/**
5072 * @opcode 0x43
5073 * @opfltest cf
5074 */
5075FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5076{
5077 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5078 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5079}
5080
5081
5082/**
5083 * @opcode 0x44
5084 * @opfltest zf
5085 */
5086FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5087{
5088 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5089 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5090}
5091
5092
5093/**
5094 * @opcode 0x45
5095 * @opfltest zf
5096 */
5097FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5098{
5099 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5100 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5101}
5102
5103
5104/**
5105 * @opcode 0x46
5106 * @opfltest cf,zf
5107 */
5108FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5109{
5110 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5111 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5112}
5113
5114
5115/**
5116 * @opcode 0x47
5117 * @opfltest cf,zf
5118 */
5119FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5120{
5121 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5122 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5123}
5124
5125
5126/**
5127 * @opcode 0x48
5128 * @opfltest sf
5129 */
5130FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5131{
5132 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5133 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5134}
5135
5136
5137/**
5138 * @opcode 0x49
5139 * @opfltest sf
5140 */
5141FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5142{
5143 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5144 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5145}
5146
5147
5148/**
5149 * @opcode 0x4a
5150 * @opfltest pf
5151 */
5152FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5153{
5154 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5155 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5156}
5157
5158
5159/**
5160 * @opcode 0x4b
5161 * @opfltest pf
5162 */
5163FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5164{
5165 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5166 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5167}
5168
5169
5170/**
5171 * @opcode 0x4c
5172 * @opfltest sf,of
5173 */
5174FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5175{
5176 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5177 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5178}
5179
5180
5181/**
5182 * @opcode 0x4d
5183 * @opfltest sf,of
5184 */
5185FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5186{
5187 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5188 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5189}
5190
5191
5192/**
5193 * @opcode 0x4e
5194 * @opfltest zf,sf,of
5195 */
5196FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5197{
5198 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5199 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5200}
5201
5202
5203/**
5204 * @opcode 0x4f
5205 * @opfltest zf,sf,of
5206 */
5207FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5208{
5209 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5210 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5211}
5212
5213#undef CMOV_X
5214
5215/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5216FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5217{
5218 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5220 if (IEM_IS_MODRM_REG_MODE(bRm))
5221 {
5222 /*
5223 * Register, register.
5224 */
5225 IEM_MC_BEGIN(0, 0);
5226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5227 IEM_MC_LOCAL(uint8_t, u8Dst);
5228 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5229 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5230 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5231 IEM_MC_PREPARE_SSE_USAGE();
5232 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5233 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5234 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5235 IEM_MC_ADVANCE_RIP_AND_FINISH();
5236 IEM_MC_END();
5237 }
5238 /* No memory operand. */
5239 else
5240 IEMOP_RAISE_INVALID_OPCODE_RET();
5241}
5242
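/*
 * movmskps packs the sign bits of the four packed singles into bits 3:0
 * of the destination GPR and zeroes the rest, hence the uint8_t local
 * stored via IEM_MC_STORE_GREG_U32 above. A sketch of the worker's job:
 *
 *      uint8_t bMask = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *          bMask |= ((puSrc->au32[i] >> 31) & 1) << i;
 *
 * movmskpd (66 prefix, next) does the same for the two double lanes,
 * filling only bits 1:0.
 */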
5243
5244/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5245FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5246{
5247 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5249 if (IEM_IS_MODRM_REG_MODE(bRm))
5250 {
5251 /*
5252 * Register, register.
5253 */
5254 IEM_MC_BEGIN(0, 0);
5255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5256 IEM_MC_LOCAL(uint8_t, u8Dst);
5257 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5258 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5259 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5260 IEM_MC_PREPARE_SSE_USAGE();
5261 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5262 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5263 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5264 IEM_MC_ADVANCE_RIP_AND_FINISH();
5265 IEM_MC_END();
5266 }
5267 /* No memory operand. */
5268 else
5269 IEMOP_RAISE_INVALID_OPCODE_RET();
5271}
5272
5273
5274/* Opcode 0xf3 0x0f 0x50 - invalid */
5275/* Opcode 0xf2 0x0f 0x50 - invalid */
5276
5277
5278/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5279FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5280{
5281 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5282 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5283}
5284
5285
5286/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5287FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5288{
5289 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5290 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5291}
5292
5293
5294/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5295FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5296{
5297 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5298 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5299}
5300
5301
5302/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5303FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5304{
5305 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5306 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5307}
5308
5309
5310/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5311FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5312{
5313 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5314 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5315}
5316
5317
5318/* Opcode 0x66 0x0f 0x52 - invalid */
5319
5320
5321/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5322FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5323{
5324 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5325 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5326}
5327
5328
5329/* Opcode 0xf2 0x0f 0x52 - invalid */
5330
5331
5332/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5333FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5334{
5335 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5336 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5337}
5338
5339
5340/* Opcode 0x66 0x0f 0x53 - invalid */
5341
5342
5343/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5344FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5345{
5346 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5347 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5348}
5349
5350
5351/* Opcode 0xf2 0x0f 0x53 - invalid */
5352
5353
5354/** Opcode 0x0f 0x54 - andps Vps, Wps */
5355FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5356{
5357 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5358 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5359}
5360
5361
5362/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5363FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5364{
5365 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5366 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5367}
5368
5369
5370/* Opcode 0xf3 0x0f 0x54 - invalid */
5371/* Opcode 0xf2 0x0f 0x54 - invalid */
5372
5373
5374/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5375FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5376{
5377 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5378 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5379}
5380
5381
5382/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5383FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5384{
5385 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5386 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5387}
5388
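/* andnps/andnpd share the integer pandn worker because bitwise logic is
 * type-agnostic. Note the operand order: the destination is the inverted one.
 * A reference sketch (pandnU128Ref is a hypothetical name, not an IEM API): */
#if 0
static void pandnU128Ref(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = ~puDst->au64[0] & puSrc->au64[0]; /* DST = NOT(DST) AND SRC */
    puDst->au64[1] = ~puDst->au64[1] & puSrc->au64[1];
}
#endif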
5389
5390/* Opcode 0xf3 0x0f 0x55 - invalid */
5391/* Opcode 0xf2 0x0f 0x55 - invalid */
5392
5393
5394/** Opcode 0x0f 0x56 - orps Vps, Wps */
5395FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5396{
5397 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5398 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5399}
5400
5401
5402/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5403FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5404{
5405 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5406 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5407}
5408
5409
5410/* Opcode 0xf3 0x0f 0x56 - invalid */
5411/* Opcode 0xf2 0x0f 0x56 - invalid */
5412
5413
5414/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5415FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5416{
5417 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5418 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5419}
5420
5421
5422/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5423FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5424{
5425 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5426 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5427}
5428
5429
5430/* Opcode 0xf3 0x0f 0x57 - invalid */
5431/* Opcode 0xf2 0x0f 0x57 - invalid */
5432
5433/** Opcode 0x0f 0x58 - addps Vps, Wps */
5434FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5435{
5436 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5437 SSE_FP_BODY_FullFull_To_Full(addps, iemAImpl_addps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5438}
5439
5440
5441/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5442FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5443{
5444 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5445 SSE_FP_BODY_FullFull_To_Full(addpd, iemAImpl_addpd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5446}
5447
5448
5449/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5450FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5451{
5452 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5453 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5454}
5455
5456
5457/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5458FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5459{
5460 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5461 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5462}
5463
5464
5465/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5466FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5467{
5468 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5469 SSE_FP_BODY_FullFull_To_Full(mulps, iemAImpl_mulps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5470}
5471
5472
5473/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5474FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5475{
5476 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5477 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5478}
5479
5480
5481/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5482FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5483{
5484 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5485 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5486}
5487
5488
5489/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5490FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5491{
5492 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5493 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5494}
5495
5496
5497/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5498FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5499{
5500 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
5501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5502 if (IEM_IS_MODRM_REG_MODE(bRm))
5503 {
5504 /*
5505 * XMM, XMM[63:0].
5506 */
5507 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5509 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5510 IEM_MC_PREPARE_SSE_USAGE();
5511
5512 IEM_MC_LOCAL(X86XMMREG, SseRes);
5513 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5514 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
5515 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
5516 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5517 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5518
5519 IEM_MC_ADVANCE_RIP_AND_FINISH();
5520 IEM_MC_END();
5521 }
5522 else
5523 {
5524 /*
5525 * XMM, [mem64].
5526 */
5527 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5531 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5532
5533 IEM_MC_LOCAL(uint64_t, u64Src);
5534 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
5535 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5536
5537 IEM_MC_PREPARE_SSE_USAGE();
5538 IEM_MC_LOCAL(X86XMMREG, SseRes);
5539 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5540 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5541 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5542
5543 IEM_MC_ADVANCE_RIP_AND_FINISH();
5544 IEM_MC_END();
5545 }
5546}
5547
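/* Why only 64 bits are fetched above: cvtps2pd widens the two low packed
 * singles into two doubles. A reference sketch assuming <string.h> memcpy, a
 * little-endian host, and ignoring MXCSR rounding/exception details
 * (cvtps2pdRef is a hypothetical name): */
#if 0
static void cvtps2pdRef(double adDst[2], uint64_t u64Src)
{
    float f0, f1;
    memcpy(&f0, (uint8_t const *)&u64Src,     sizeof(f0));
    memcpy(&f1, (uint8_t const *)&u64Src + 4, sizeof(f1));
    adDst[0] = f0; /* low single  -> low double */
    adDst[1] = f1; /* high single -> high double */
}
#endif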
5548
5549/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5550FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5551{
5552 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
5553 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5554 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5555}
5556
5557
5558/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5559FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5560{
5561 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5562 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5563}
5564
5565
5566/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5567FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5568{
5569 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5570 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5571}
5572
5573
5574/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5575FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5576{
5577 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5578 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5579 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5580}
5581
5582
5583/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5584FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5585{
5586 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5587 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5588 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5589}
5590
5591
5592/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5593FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5594{
5595 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5596 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5597 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5598}
5599
5600
5601/* Opcode 0xf2 0x0f 0x5b - invalid */
5602
5603
5604/** Opcode 0x0f 0x5c - subps Vps, Wps */
5605FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5606{
5607 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5608 SSE_FP_BODY_FullFull_To_Full(subps, iemAImpl_subps_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5609}
5610
5611
5612/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5613FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5614{
5615 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5616 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5617}
5618
5619
5620/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5621FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5622{
5623 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5624 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5625}
5626
5627
5628/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5629FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5630{
5631 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5632 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5633}
5634
5635
5636/** Opcode 0x0f 0x5d - minps Vps, Wps */
5637FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5638{
5639 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5640 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5641}
5642
5643
5644/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5645FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5646{
5647 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5648 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5649}
5650
5651
5652/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5653FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5654{
5655 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5656 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5657}
5658
5659
5660/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5661FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5662{
5663 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5664 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5665}
5666
5667
5668/** Opcode 0x0f 0x5e - divps Vps, Wps */
5669FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5670{
5671 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5672 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5673}
5674
5675
5676/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5677FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5678{
5679 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5680 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5681}
5682
5683
5684/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5685FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5686{
5687 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5688 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5689}
5690
5691
5692/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5693FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5694{
5695 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5696 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5697}
5698
5699
5700/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5701FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5702{
5703 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5705}
5706
5707
5708/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5709FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5710{
5711 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5713}
5714
5715
5716/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5717FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5718{
5719 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5721}
5722
5723
5724/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5725FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5726{
5727 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5728 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5729}
5730
5731
5732/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5733FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5734{
5735 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5736 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5737}
5738
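/* Reference sketch of the low-half byte interleave performed by
 * iemAImpl_punpcklbw_u64 (assumes <string.h> memcpy; punpcklbwU64Ref is a
 * hypothetical name, not an IEM API): */
#if 0
static void punpcklbwU64Ref(uint8_t auDst[8], uint8_t const auSrc[8])
{
    uint8_t auRes[8];
    for (unsigned i = 0; i < 4; i++)
    {
        auRes[i * 2]     = auDst[i]; /* destination low byte i ... */
        auRes[i * 2 + 1] = auSrc[i]; /* ... interleaved with source low byte i */
    }
    memcpy(auDst, auRes, sizeof(auRes));
}
#endif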
5739
5740/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5741FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5742{
5743 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5744 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5745}
5746
5747
5748/* Opcode 0xf3 0x0f 0x60 - invalid */
5749
5750
5751/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5752FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5753{
5754 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
5755 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5756 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5757}
5758
5759
5760/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5761FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5762{
5763 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5764 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5765}
5766
5767
5768/* Opcode 0xf3 0x0f 0x61 - invalid */
5769
5770
5771/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5772FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5773{
5774 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5775 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5776}
5777
5778
5779/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5780FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5781{
5782 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5783 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5784}
5785
5786
5787/* Opcode 0xf3 0x0f 0x62 - invalid */
5788
5789
5790
5791/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5792FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5793{
5794 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5795 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5796}
5797
5798
5799/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5800FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5801{
5802 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5803 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5804}
5805
5806
5807/* Opcode 0xf3 0x0f 0x63 - invalid */
5808
5809
5810/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5811FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5812{
5813 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5814 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5815}
5816
5817
5818/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5819FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5820{
5821 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5822 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5823}
5824
5825
5826/* Opcode 0xf3 0x0f 0x64 - invalid */
5827
5828
5829/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5830FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5831{
5832 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5833 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5834}
5835
5836
5837/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5838FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5839{
5840 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5841 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5842}
5843
5844
5845/* Opcode 0xf3 0x0f 0x65 - invalid */
5846
5847
5848/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5849FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5850{
5851 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5852 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5853}
5854
5855
5856/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5857FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5858{
5859 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5860 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5861}
5862
5863
5864/* Opcode 0xf3 0x0f 0x66 - invalid */
5865
5866
5867/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5868FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5869{
5870 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5871 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5872}
5873
5874
5875/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5876FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5877{
5878 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5879 SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5880}
5881
5882
5883/* Opcode 0xf3 0x0f 0x67 - invalid */
5884
5885
5886/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5887 * @note Intel and AMD both use Qd for the second parameter; however, they
5888 * both list it as an mmX/mem64 operand and Intel describes it as being
5889 * loaded as a qword, so it should be Qq, shouldn't it? */
5890FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5891{
5892 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5893 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5894}
5895
5896
5897/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5898FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5899{
5900 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5901 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5902}
5903
5904
5905/* Opcode 0xf3 0x0f 0x68 - invalid */
5906
5907
5908/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5909 * @note Intel and AMD both use Qd for the second parameter; however, they
5910 * both list it as an mmX/mem64 operand and Intel describes it as being
5911 * loaded as a qword, so it should be Qq, shouldn't it? */
5912FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5913{
5914 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5915 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5916}
5917
5918
5919/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5920FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5921{
5922 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5923 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5925}
5926
5927
5928/* Opcode 0xf3 0x0f 0x69 - invalid */
5929
5930
5931/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5932 * @note Intel and AMD both use Qd for the second parameter; however, they
5933 * both list it as an mmX/mem64 operand and Intel describes it as being
5934 * loaded as a qword, so it should be Qq, shouldn't it? */
5935FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5936{
5937 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5938 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5939}
5940
5941
5942/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5943FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5944{
5945 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5946 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5947}
5948
5949
5950/* Opcode 0xf3 0x0f 0x6a - invalid */
5951
5952
5953/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5954FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5955{
5956 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5957 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5958}
5959
5960
5961/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5962FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5963{
5964 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5965 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5966}
5967
5968
5969/* Opcode 0xf3 0x0f 0x6b - invalid */
5970
5971
5972/* Opcode 0x0f 0x6c - invalid */
5973
5974
5975/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5976FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5977{
5978 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5979 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5980}
5981
5982
5983/* Opcode 0xf3 0x0f 0x6c - invalid */
5984/* Opcode 0xf2 0x0f 0x6c - invalid */
5985
5986
5987/* Opcode 0x0f 0x6d - invalid */
5988
5989
5990/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5991FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5992{
5993 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5994 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5995}
5996
5997
5998/* Opcode 0xf3 0x0f 0x6d - invalid */
5999
6000
6001FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6002{
6003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6004 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6005 {
6006 /**
6007 * @opcode 0x6e
6008 * @opcodesub rex.w=1
6009 * @oppfx none
6010 * @opcpuid mmx
6011 * @opgroup og_mmx_datamove
6012 * @opxcpttype 5
6013 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6014 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6015 */
6016 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6017 if (IEM_IS_MODRM_REG_MODE(bRm))
6018 {
6019 /* MMX, greg64 */
6020 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6022 IEM_MC_LOCAL(uint64_t, u64Tmp);
6023
6024 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6025 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6026 IEM_MC_FPU_TO_MMX_MODE();
6027
6028 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6029 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6030
6031 IEM_MC_ADVANCE_RIP_AND_FINISH();
6032 IEM_MC_END();
6033 }
6034 else
6035 {
6036 /* MMX, [mem64] */
6037 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6039 IEM_MC_LOCAL(uint64_t, u64Tmp);
6040
6041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6043 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6044 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6045
6046 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6047 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6048 IEM_MC_FPU_TO_MMX_MODE();
6049
6050 IEM_MC_ADVANCE_RIP_AND_FINISH();
6051 IEM_MC_END();
6052 }
6053 }
6054 else
6055 {
6056 /**
6057 * @opdone
6058 * @opcode 0x6e
6059 * @opcodesub rex.w=0
6060 * @oppfx none
6061 * @opcpuid mmx
6062 * @opgroup og_mmx_datamove
6063 * @opxcpttype 5
6064 * @opfunction iemOp_movd_q_Pd_Ey
6065 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6066 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6067 */
6068 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6069 if (IEM_IS_MODRM_REG_MODE(bRm))
6070 {
6071 /* MMX, greg32 */
6072 IEM_MC_BEGIN(0, 0);
6073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6074 IEM_MC_LOCAL(uint32_t, u32Tmp);
6075
6076 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6077 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6078 IEM_MC_FPU_TO_MMX_MODE();
6079
6080 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6081 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6082
6083 IEM_MC_ADVANCE_RIP_AND_FINISH();
6084 IEM_MC_END();
6085 }
6086 else
6087 {
6088 /* MMX, [mem32] */
6089 IEM_MC_BEGIN(0, 0);
6090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6091 IEM_MC_LOCAL(uint32_t, u32Tmp);
6092
6093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6095 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6096 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6097
6098 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6099 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6100 IEM_MC_FPU_TO_MMX_MODE();
6101
6102 IEM_MC_ADVANCE_RIP_AND_FINISH();
6103 IEM_MC_END();
6104 }
6105 }
6106}
6107
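/* Note the IEM_GET_MODRM_REG_8 use above: MMX registers are not extended by
 * REX.R/REX.B, so only the three raw ModR/M bits select the register. A
 * reference decode of the destination index: */
#if 0
unsigned const iMReg = (bRm >> 3) & 7; /* reg field; REX.R is ignored for MMX */
#endif
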
6108FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6109{
6110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6111 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6112 {
6113 /**
6114 * @opcode 0x6e
6115 * @opcodesub rex.w=1
6116 * @oppfx 0x66
6117 * @opcpuid sse2
6118 * @opgroup og_sse2_simdint_datamove
6119 * @opxcpttype 5
6120 * @optest 64-bit / op1=1 op2=2 -> op1=2
6121 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6122 */
6123 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6124 if (IEM_IS_MODRM_REG_MODE(bRm))
6125 {
6126 /* XMM, greg64 */
6127 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6129 IEM_MC_LOCAL(uint64_t, u64Tmp);
6130
6131 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6133
6134 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6135 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6136
6137 IEM_MC_ADVANCE_RIP_AND_FINISH();
6138 IEM_MC_END();
6139 }
6140 else
6141 {
6142 /* XMM, [mem64] */
6143 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6145 IEM_MC_LOCAL(uint64_t, u64Tmp);
6146
6147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6149 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6151
6152 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6153 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6154
6155 IEM_MC_ADVANCE_RIP_AND_FINISH();
6156 IEM_MC_END();
6157 }
6158 }
6159 else
6160 {
6161 /**
6162 * @opdone
6163 * @opcode 0x6e
6164 * @opcodesub rex.w=0
6165 * @oppfx 0x66
6166 * @opcpuid sse2
6167 * @opgroup og_sse2_simdint_datamove
6168 * @opxcpttype 5
6169 * @opfunction iemOp_movd_q_Vy_Ey
6170 * @optest op1=1 op2=2 -> op1=2
6171 * @optest op1=0 op2=-42 -> op1=-42
6172 */
6173 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6174 if (IEM_IS_MODRM_REG_MODE(bRm))
6175 {
6176 /* XMM, greg32 */
6177 IEM_MC_BEGIN(0, 0);
6178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6179 IEM_MC_LOCAL(uint32_t, u32Tmp);
6180
6181 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6182 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6183
6184 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6185 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6186
6187 IEM_MC_ADVANCE_RIP_AND_FINISH();
6188 IEM_MC_END();
6189 }
6190 else
6191 {
6192 /* XMM, [mem32] */
6193 IEM_MC_BEGIN(0, 0);
6194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6195 IEM_MC_LOCAL(uint32_t, u32Tmp);
6196
6197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6199 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6200 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6201
6202 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6203 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6204
6205 IEM_MC_ADVANCE_RIP_AND_FINISH();
6206 IEM_MC_END();
6207 }
6208 }
6209}
6210
6211/* Opcode 0xf3 0x0f 0x6e - invalid */
6212
6213
6214/**
6215 * @opcode 0x6f
6216 * @oppfx none
6217 * @opcpuid mmx
6218 * @opgroup og_mmx_datamove
6219 * @opxcpttype 5
6220 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6221 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6222 */
6223FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6224{
6225 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6227 if (IEM_IS_MODRM_REG_MODE(bRm))
6228 {
6229 /*
6230 * Register, register.
6231 */
6232 IEM_MC_BEGIN(0, 0);
6233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6234 IEM_MC_LOCAL(uint64_t, u64Tmp);
6235
6236 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6237 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6238 IEM_MC_FPU_TO_MMX_MODE();
6239
6240 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6241 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6242
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 }
6246 else
6247 {
6248 /*
6249 * Register, memory.
6250 */
6251 IEM_MC_BEGIN(0, 0);
6252 IEM_MC_LOCAL(uint64_t, u64Tmp);
6253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6254
6255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6257 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6258 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6259
6260 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6261 IEM_MC_FPU_TO_MMX_MODE();
6262
6263 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6264
6265 IEM_MC_ADVANCE_RIP_AND_FINISH();
6266 IEM_MC_END();
6267 }
6268}
6269
6270/**
6271 * @opcode 0x6f
6272 * @oppfx 0x66
6273 * @opcpuid sse2
6274 * @opgroup og_sse2_simdint_datamove
6275 * @opxcpttype 1
6276 * @optest op1=1 op2=2 -> op1=2
6277 * @optest op1=0 op2=-42 -> op1=-42
6278 */
6279FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6280{
6281 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6283 if (IEM_IS_MODRM_REG_MODE(bRm))
6284 {
6285 /*
6286 * Register, register.
6287 */
6288 IEM_MC_BEGIN(0, 0);
6289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6290
6291 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6292 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6293
6294 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6295 IEM_GET_MODRM_RM(pVCpu, bRm));
6296 IEM_MC_ADVANCE_RIP_AND_FINISH();
6297 IEM_MC_END();
6298 }
6299 else
6300 {
6301 /*
6302 * Register, memory.
6303 */
6304 IEM_MC_BEGIN(0, 0);
6305 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6307
6308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6312
6313 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6314 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6315
6316 IEM_MC_ADVANCE_RIP_AND_FINISH();
6317 IEM_MC_END();
6318 }
6319}
6320
6321/**
6322 * @opcode 0x6f
6323 * @oppfx 0xf3
6324 * @opcpuid sse2
6325 * @opgroup og_sse2_simdint_datamove
6326 * @opxcpttype 4UA
6327 * @optest op1=1 op2=2 -> op1=2
6328 * @optest op1=0 op2=-42 -> op1=-42
6329 */
6330FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6331{
6332 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6334 if (IEM_IS_MODRM_REG_MODE(bRm))
6335 {
6336 /*
6337 * Register, register.
6338 */
6339 IEM_MC_BEGIN(0, 0);
6340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6342 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6343 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6344 IEM_GET_MODRM_RM(pVCpu, bRm));
6345 IEM_MC_ADVANCE_RIP_AND_FINISH();
6346 IEM_MC_END();
6347 }
6348 else
6349 {
6350 /*
6351 * Register, memory.
6352 */
6353 IEM_MC_BEGIN(0, 0);
6354 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6356
6357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6361 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6362 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6363
6364 IEM_MC_ADVANCE_RIP_AND_FINISH();
6365 IEM_MC_END();
6366 }
6367}
6368
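/* The movdqa fetch above uses the _ALIGN_SSE variant while movdqu uses
 * _NO_AC: the former corresponds to a 16-byte alignment check on the
 * effective address, roughly equivalent to this sketch (the raise helper is
 * a real IEM function; the check placement here is illustrative only): */
#if 0
if (GCPtrEffSrc & 15)
    return iemRaiseGeneralProtectionFault0(pVCpu); /* misaligned SSE access -> #GP(0) */
#endif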
6369
6370/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6371FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6372{
6373 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6375 if (IEM_IS_MODRM_REG_MODE(bRm))
6376 {
6377 /*
6378 * Register, register.
6379 */
6380 IEM_MC_BEGIN(0, 0);
6381 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6383 IEM_MC_ARG(uint64_t *, pDst, 0);
6384 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6385 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6386 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6387 IEM_MC_PREPARE_FPU_USAGE();
6388 IEM_MC_FPU_TO_MMX_MODE();
6389
6390 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6391 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6392 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6393 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6394
6395 IEM_MC_ADVANCE_RIP_AND_FINISH();
6396 IEM_MC_END();
6397 }
6398 else
6399 {
6400 /*
6401 * Register, memory.
6402 */
6403 IEM_MC_BEGIN(0, 0);
6404 IEM_MC_ARG(uint64_t *, pDst, 0);
6405 IEM_MC_LOCAL(uint64_t, uSrc);
6406 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6408
6409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6410 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6411 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6414 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6415
6416 IEM_MC_PREPARE_FPU_USAGE();
6417 IEM_MC_FPU_TO_MMX_MODE();
6418
6419 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6420 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6421 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6422
6423 IEM_MC_ADVANCE_RIP_AND_FINISH();
6424 IEM_MC_END();
6425 }
6426}
6427
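/* The trailing '1' passed to IEM_MC_CALC_RM_EFF_ADDR in the memory path
 * above tells the effective-address calculation that one immediate byte is
 * still outstanding; RIP-relative displacements are taken from the end of
 * the whole instruction, as in this reference sketch (hypothetical helper): */
#if 0
static uint64_t ripRelEffAddrRef(uint64_t uRipInstrStart, uint8_t cbInstr, int32_t i32Disp)
{
    return uRipInstrStart + cbInstr + i32Disp; /* disp is relative to the next instruction */
}
#endif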
6428
6429/**
6430 * Common worker for SSE2 instructions of the forms:
6431 * pshufd xmm1, xmm2/mem128, imm8
6432 * pshufhw xmm1, xmm2/mem128, imm8
6433 * pshuflw xmm1, xmm2/mem128, imm8
6434 *
6435 * Proper alignment of the 128-bit operand is enforced.
6436 * Exceptions type 4. SSE2 cpuid checks.
6437 */
6438FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6439{
6440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6441 if (IEM_IS_MODRM_REG_MODE(bRm))
6442 {
6443 /*
6444 * Register, register.
6445 */
6446 IEM_MC_BEGIN(0, 0);
6447 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6449 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6450 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6451 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6452 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6453 IEM_MC_PREPARE_SSE_USAGE();
6454 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6455 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6456 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6457 IEM_MC_ADVANCE_RIP_AND_FINISH();
6458 IEM_MC_END();
6459 }
6460 else
6461 {
6462 /*
6463 * Register, memory.
6464 */
6465 IEM_MC_BEGIN(0, 0);
6466 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6467 IEM_MC_LOCAL(RTUINT128U, uSrc);
6468 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6470
6471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6472 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6473 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6476
6477 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6478 IEM_MC_PREPARE_SSE_USAGE();
6479 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6480 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6481
6482 IEM_MC_ADVANCE_RIP_AND_FINISH();
6483 IEM_MC_END();
6484 }
6485}
6486
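/* Reference sketch of the dword shuffle all three pshufXX workers build on
 * (this is the pshufd case; pshufhw/pshuflw apply the same two-bit selector
 * scheme to the high/low four words only; pshufdRef is a hypothetical name): */
#if 0
static void pshufdRef(uint32_t auDst[4], uint32_t const auSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (i * 2)) & 3]; /* two imm8 bits select each result dword */
}
#endif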
6487
6488/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6489FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6490{
6491 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6492 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6493}
6494
6495
6496/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6497FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6498{
6499 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6500 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6501}
6502
6503
6504/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6505FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6506{
6507 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6508 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6509}
6510
6511
6512/**
6513 * Common worker for MMX instructions of the form:
6514 * psrlw mm, imm8
6515 * psraw mm, imm8
6516 * psllw mm, imm8
6517 * psrld mm, imm8
6518 * psrad mm, imm8
6519 * pslld mm, imm8
6520 * psrlq mm, imm8
6521 * psllq mm, imm8
6522 *
6523 */
6524FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6525{
6526 if (IEM_IS_MODRM_REG_MODE(bRm))
6527 {
6528 /*
6529 * Register, immediate.
6530 */
6531 IEM_MC_BEGIN(0, 0);
6532 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6534 IEM_MC_ARG(uint64_t *, pDst, 0);
6535 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6536 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6537 IEM_MC_PREPARE_FPU_USAGE();
6538 IEM_MC_FPU_TO_MMX_MODE();
6539
6540 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6541 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6542 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6543
6544 IEM_MC_ADVANCE_RIP_AND_FINISH();
6545 IEM_MC_END();
6546 }
6547 else
6548 {
6549 /*
6550 * Register, memory not supported.
6551 */
6552 /// @todo Caller already enforced register mode?!
6553 AssertFailedReturn(VINF_SUCCESS);
6554 }
6555}
6556
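/* Reference sketch of one of the shift helpers dispatched above; psraw is
 * the interesting case because arithmetic shift counts clamp at 15 rather
 * than zeroing the lanes (assumes <string.h> memcpy and the usual arithmetic
 * behaviour of >> on signed values; psrawImmU64Ref is a hypothetical name): */
#if 0
static void psrawImmU64Ref(uint64_t *puDst, uint8_t bShift)
{
    int16_t ai16[4];
    memcpy(ai16, puDst, sizeof(ai16));
    for (unsigned i = 0; i < 4; i++)
        ai16[i] = (int16_t)(ai16[i] >> RT_MIN(bShift, 15)); /* count saturates at 15 */
    memcpy(puDst, ai16, sizeof(ai16));
}
#endif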
6557
6558#if 0 /*unused*/
6559/**
6560 * Common worker for SSE2 instructions of the form:
6561 * psrlw xmm, imm8
6562 * psraw xmm, imm8
6563 * psllw xmm, imm8
6564 * psrld xmm, imm8
6565 * psrad xmm, imm8
6566 * pslld xmm, imm8
6567 * psrlq xmm, imm8
6568 * psllq xmm, imm8
6569 *
6570 */
6571FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6572{
6573 if (IEM_IS_MODRM_REG_MODE(bRm))
6574 {
6575 /*
6576 * Register, immediate.
6577 */
6578 IEM_MC_BEGIN(0, 0);
6579 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6581 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6582 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6583 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6584 IEM_MC_PREPARE_SSE_USAGE();
6585 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6586 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6587 IEM_MC_ADVANCE_RIP_AND_FINISH();
6588 IEM_MC_END();
6589 }
6590 else
6591 {
6592 /*
6593 * Register, memory.
6594 */
6595 /// @todo Caller already enforced register mode?!
6596 AssertFailedReturn(VINF_SUCCESS);
6597 }
6598}
6599#endif
6600
6601
6602/**
6603 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6604 */
6605#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6606 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6607 { \
6608 /* \
6609 * Register, immediate. \
6610 */ \
6611 IEM_MC_BEGIN(0, 0); \
6612 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6614 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6615 IEM_MC_PREPARE_SSE_USAGE(); \
6616 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6617 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6618 } IEM_MC_NATIVE_ELSE() { \
6619 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6620 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6621 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6622 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6623 } IEM_MC_NATIVE_ENDIF(); \
6624 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6625 IEM_MC_END(); \
6626 } \
6627 else \
6628 { \
6629 /* \
6630 * Register, memory. \
6631 */ \
6632 AssertFailedReturn(VINF_SUCCESS); \
6633 } (void)0
6634
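/* For contrast with the native-emit path selected by IEM_MC_NATIVE_IF, the C
 * fallback helpers the macro dispatches to have roughly this shape for the
 * logical shifts, where counts above the lane width zero the lanes
 * (psrlwImmU128Ref is a hypothetical sketch, not the real iemAImpl worker): */
#if 0
static void psrlwImmU128Ref(PRTUINT128U puDst, uint8_t bShift)
{
    for (unsigned i = 0; i < 8; i++)
        puDst->au16[i] = bShift <= 15 ? puDst->au16[i] >> bShift : 0;
}
#endif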
6635
6636/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6637FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6638{
6639// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6640 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6641}
6642
6643
6644/** Opcode 0x66 0x0f 0x71 11/2. */
6645FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6646{
6647// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6648 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6649}
6650
6651
6652/** Opcode 0x0f 0x71 11/4. */
6653FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6654{
6655// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6656 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6657}
6658
6659
6660/** Opcode 0x66 0x0f 0x71 11/4. */
6661FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6662{
6663// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6664 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6665}
6666
6667
6668/** Opcode 0x0f 0x71 11/6. */
6669FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6670{
6671// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6672 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6673}
6674
6675
6676/** Opcode 0x66 0x0f 0x71 11/6. */
6677FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6678{
6679// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6680 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6681}
6682
6683
6684/**
6685 * Group 12 jump table for register variant.
6686 */
6687IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6688{
6689 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6691 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6692 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6693 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6694 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6695 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6696 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6697};
6698AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6699
6700
6701/** Opcode 0x0f 0x71. */
6702FNIEMOP_DEF(iemOp_Grp12)
6703{
6704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6705 if (IEM_IS_MODRM_REG_MODE(bRm))
6706 /* register, register */
6707 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6708 + pVCpu->iem.s.idxPrefix], bRm);
6709 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6710}
6711
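/* How the group tables above are indexed: four columns per /reg row, one per
 * mandatory prefix. A sketch of the computation, assuming idxPrefix follows
 * the usual IEM encoding (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2): */
#if 0
unsigned const idxFn = IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix;
#endif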
6712
6713/** Opcode 0x0f 0x72 11/2. */
6714FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6715{
6716// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6717 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6718}
6719
6720
6721/** Opcode 0x66 0x0f 0x72 11/2. */
6722FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6723{
6724// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6725 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6726}
6727
6728
6729/** Opcode 0x0f 0x72 11/4. */
6730FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6731{
6732// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6733 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6734}
6735
6736
6737/** Opcode 0x66 0x0f 0x72 11/4. */
6738FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6739{
6740// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6741 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6742}
6743
6744
6745/** Opcode 0x0f 0x72 11/6. */
6746FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6747{
6748// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6749 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6750}
6751
6752/** Opcode 0x66 0x0f 0x72 11/6. */
6753FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6754{
6755// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6756 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6757}
6758
6759
6760/**
6761 * Group 13 jump table for register variant.
6762 */
6763IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6764{
6765 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6766 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6767 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6768 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6769 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6770 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6771 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6772 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6773};
6774AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6775
6776/** Opcode 0x0f 0x72. */
6777FNIEMOP_DEF(iemOp_Grp13)
6778{
6779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6780 if (IEM_IS_MODRM_REG_MODE(bRm))
6781 /* register, register */
6782 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6783 + pVCpu->iem.s.idxPrefix], bRm);
6784 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6785}
6786
6787
6788/** Opcode 0x0f 0x73 11/2. */
6789FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6790{
6791// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6792 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6793}
6794
6795
6796/** Opcode 0x66 0x0f 0x73 11/2. */
6797FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6798{
6799// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6800 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6801}
6802
6803
6804/** Opcode 0x66 0x0f 0x73 11/3. */
6805FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6806{
6807// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6808 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6809}
6810
6811
6812/** Opcode 0x0f 0x73 11/6. */
6813FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6814{
6815// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6816 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6817}
6818
6819
6820/** Opcode 0x66 0x0f 0x73 11/6. */
6821FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6822{
6823// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6824 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6825}
6826
6827
6828/** Opcode 0x66 0x0f 0x73 11/7. */
6829FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6830{
6831// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6832 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6833}
6834
6835/**
6836 * Group 14 jump table for register variant.
6837 */
6838IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6839{
6840 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6841 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6844 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6845 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6846 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6847 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6848};
6849AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6850
6851
6852/** Opcode 0x0f 0x73. */
6853FNIEMOP_DEF(iemOp_Grp14)
6854{
6855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6856 if (IEM_IS_MODRM_REG_MODE(bRm))
6857 /* register, register */
6858 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6859 + pVCpu->iem.s.idxPrefix], bRm);
6860 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6861}
6862
6863
6864/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6865FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6866{
6867 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6868 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6869}
6870
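/* Reference sketch of the byte compare performed by iemAImpl_pcmpeqb_u64:
 * each lane becomes all ones on equality, all zeros otherwise (pcmpeqbU64Ref
 * is a hypothetical name, not an IEM API): */
#if 0
static void pcmpeqbU64Ref(uint8_t auDst[8], uint8_t const auSrc[8])
{
    for (unsigned i = 0; i < 8; i++)
        auDst[i] = auDst[i] == auSrc[i] ? 0xff : 0x00;
}
#endif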
6871
6872/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6873FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6874{
6875 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6876 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6877}
6878
6879
6880/* Opcode 0xf3 0x0f 0x74 - invalid */
6881/* Opcode 0xf2 0x0f 0x74 - invalid */
6882
6883
6884/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6885FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6886{
6887 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6888 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6889}
6890
6891
6892/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6893FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6894{
6895 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6896 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6897}
6898
6899
6900/* Opcode 0xf3 0x0f 0x75 - invalid */
6901/* Opcode 0xf2 0x0f 0x75 - invalid */
6902
6903
6904/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6905FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6906{
6907 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6908 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6909}
6910
6911
6912/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6913FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6914{
6915 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6916 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6917}
6918
6919
6920/* Opcode 0xf3 0x0f 0x76 - invalid */
6921/* Opcode 0xf2 0x0f 0x76 - invalid */
6922
6923
6924/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6925FNIEMOP_DEF(iemOp_emms)
6926{
6927 IEMOP_MNEMONIC(emms, "emms");
6928 IEM_MC_BEGIN(0, 0);
6929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6930 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6931 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6932 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6933 IEM_MC_FPU_FROM_MMX_MODE();
6934 IEM_MC_ADVANCE_RIP_AND_FINISH();
6935 IEM_MC_END();
6936}
6937
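/* What leaving MMX mode amounts to: emms tags all eight x87 registers empty
 * again. In the abridged FXSAVE tag format that is FTW=0x00 (0xffff in the
 * full tag-word form); the field access below is only a sketch, the real
 * work happens inside IEM_MC_FPU_FROM_MMX_MODE: */
#if 0
pVCpu->cpum.GstCtx.XState.x87.FTW = 0; /* all registers empty (abridged tags) */
#endif
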
6938/* Opcode 0x66 0x0f 0x77 - invalid */
6939/* Opcode 0xf3 0x0f 0x77 - invalid */
6940/* Opcode 0xf2 0x0f 0x77 - invalid */
6941
6942/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6943#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6944FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6945{
6946 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6947 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6948 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6949 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6950
6951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6952 if (IEM_IS_MODRM_REG_MODE(bRm))
6953 {
6954 /*
6955 * Register, register.
6956 */
6957 if (enmEffOpSize == IEMMODE_64BIT)
6958 {
6959 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6960 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6961 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6962 IEM_MC_ARG(uint64_t, u64Enc, 1);
6963 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6964 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6965 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6966 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6967 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6968 IEM_MC_END();
6969 }
6970 else
6971 {
6972 IEM_MC_BEGIN(0, 0);
6973 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6974 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6975 IEM_MC_ARG(uint32_t, u32Enc, 1);
6976 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6977 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6978 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6979 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6980 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6981 IEM_MC_END();
6982 }
6983 }
6984 else
6985 {
6986 /*
6987 * Memory, register.
6988 */
6989 if (enmEffOpSize == IEMMODE_64BIT)
6990 {
6991 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6992 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6994 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6995 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6996 IEM_MC_ARG(uint64_t, u64Enc, 2);
6997 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6998 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6999 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7000 IEM_MC_END();
7001 }
7002 else
7003 {
7004 IEM_MC_BEGIN(0, 0);
7005 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7007 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7008 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7009 IEM_MC_ARG(uint32_t, u32Enc, 2);
7010 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7011 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7012 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7013 IEM_MC_END();
7014 }
7015 }
7016}
7017#else
7018FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7019#endif
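/* Without nested VMX hardware-virtualization support compiled in, vmread (and
 * vmwrite below) decode to #UD-raising stubs instead. */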

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */


/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
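/* As with the MMX form above, REX.W selects between a 32-bit movd and a
 * 64-bit movq store out of the XMM register. */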

/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
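/* Note: movq Vq,Wq zero-extends the qword into the full 128-bit destination
 *       register (IEM_MC_STORE_XREG_U64_ZX_U128), as SSE2 specifies. */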

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
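/* Note: in the memory path the switch to MMX mode is done only after the
 *       store, presumably so a faulting store doesn't leave the FPU switched
 *       into MMX mode. */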

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
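/* The only difference between the movdqa and movdqu store paths above is the
 * alignment check: IEM_MC_STORE_MEM_U128_ALIGN_SSE faults on a misaligned
 * 16-byte access, while the _NO_AC variant permits unaligned stores. */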

/* Opcode 0xf2 0x0f 0x7f - invalid */


/**
 * @opcode      0x80
 * @opfltest    of
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
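/* The remaining long-form Jcc decoders (0x0f 0x81 thru 0x8f) below all follow
 * this exact pattern and differ only in the EFLAGS condition tested. */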


/**
 * @opcode      0x81
 * @opfltest    of
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x82
 * @opfltest    cf
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x83
 * @opfltest    cf
 */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x84
 * @opfltest    zf
 */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x85
 * @opfltest    zf
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x86
 * @opfltest    cf,zf
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x87
 * @opfltest    cf,zf
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x88
 * @opfltest    sf
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x89
 * @opfltest    sf
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x8a
 * @opfltest    pf
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x8b
 * @opfltest    pf
 */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x8c
 * @opfltest    sf,of
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
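/* Signed conditions: 'less' is SF != OF (IEM_MC_IF_EFL_BITS_NE), and 'less or
 * equal' below additionally checks ZF, matching the architectural Jcc
 * condition definitions. */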


/**
 * @opcode      0x8d
 * @opfltest    sf,of
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x8e
 * @opfltest    zf,sf,of
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x8f
 * @opfltest    zf,sf,of
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x90
 * @opfltest    of
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
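/* The SETcc decoders below mirror the Jcc conditions above, storing a 1 or 0
 * byte to the register or memory operand instead of branching. */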
8177
8178
8179/**
8180 * @opcode 0x91
8181 * @opfltest of
8182 */
8183FNIEMOP_DEF(iemOp_setno_Eb)
8184{
8185 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8186 IEMOP_HLP_MIN_386();
8187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8188
8189 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8190 * any way. AMD says it's "unused", whatever that means. We're
8191 * ignoring for now. */
8192 if (IEM_IS_MODRM_REG_MODE(bRm))
8193 {
8194 /* register target */
8195 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8197 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8198 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8199 } IEM_MC_ELSE() {
8200 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8201 } IEM_MC_ENDIF();
8202 IEM_MC_ADVANCE_RIP_AND_FINISH();
8203 IEM_MC_END();
8204 }
8205 else
8206 {
8207 /* memory target */
8208 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8212 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8213 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8214 } IEM_MC_ELSE() {
8215 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8216 } IEM_MC_ENDIF();
8217 IEM_MC_ADVANCE_RIP_AND_FINISH();
8218 IEM_MC_END();
8219 }
8220}
8221
8222
8223/**
8224 * @opcode 0x92
8225 * @opfltest cf
8226 */
8227FNIEMOP_DEF(iemOp_setc_Eb)
8228{
8229 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8230 IEMOP_HLP_MIN_386();
8231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8232
8233 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8234 * any way. AMD says it's "unused", whatever that means. We're
8235 * ignoring for now. */
8236 if (IEM_IS_MODRM_REG_MODE(bRm))
8237 {
8238 /* register target */
8239 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8242 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8243 } IEM_MC_ELSE() {
8244 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8245 } IEM_MC_ENDIF();
8246 IEM_MC_ADVANCE_RIP_AND_FINISH();
8247 IEM_MC_END();
8248 }
8249 else
8250 {
8251 /* memory target */
8252 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8256 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8257 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8258 } IEM_MC_ELSE() {
8259 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8260 } IEM_MC_ENDIF();
8261 IEM_MC_ADVANCE_RIP_AND_FINISH();
8262 IEM_MC_END();
8263 }
8264}
8265
8266
8267/**
8268 * @opcode 0x93
8269 * @opfltest cf
8270 */
8271FNIEMOP_DEF(iemOp_setnc_Eb)
8272{
8273 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8274 IEMOP_HLP_MIN_386();
8275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8276
8277 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8278 * any way. AMD says it's "unused", whatever that means. We're
8279 * ignoring for now. */
8280 if (IEM_IS_MODRM_REG_MODE(bRm))
8281 {
8282 /* register target */
8283 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8285 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8286 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8287 } IEM_MC_ELSE() {
8288 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8289 } IEM_MC_ENDIF();
8290 IEM_MC_ADVANCE_RIP_AND_FINISH();
8291 IEM_MC_END();
8292 }
8293 else
8294 {
8295 /* memory target */
8296 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8300 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8301 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8302 } IEM_MC_ELSE() {
8303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8304 } IEM_MC_ENDIF();
8305 IEM_MC_ADVANCE_RIP_AND_FINISH();
8306 IEM_MC_END();
8307 }
8308}
8309
8310
8311/**
8312 * @opcode 0x94
8313 * @opfltest zf
8314 */
8315FNIEMOP_DEF(iemOp_sete_Eb)
8316{
8317 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8318 IEMOP_HLP_MIN_386();
8319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8320
8321 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8322 * any way. AMD says it's "unused", whatever that means. We're
8323 * ignoring for now. */
8324 if (IEM_IS_MODRM_REG_MODE(bRm))
8325 {
8326 /* register target */
8327 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8330 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8331 } IEM_MC_ELSE() {
8332 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8333 } IEM_MC_ENDIF();
8334 IEM_MC_ADVANCE_RIP_AND_FINISH();
8335 IEM_MC_END();
8336 }
8337 else
8338 {
8339 /* memory target */
8340 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8344 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8345 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8346 } IEM_MC_ELSE() {
8347 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8348 } IEM_MC_ENDIF();
8349 IEM_MC_ADVANCE_RIP_AND_FINISH();
8350 IEM_MC_END();
8351 }
8352}
8353
8354
8355/**
8356 * @opcode 0x95
8357 * @opfltest zf
8358 */
8359FNIEMOP_DEF(iemOp_setne_Eb)
8360{
8361 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8362 IEMOP_HLP_MIN_386();
8363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8364
8365 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8366 * any way. AMD says it's "unused", whatever that means. We're
8367 * ignoring for now. */
8368 if (IEM_IS_MODRM_REG_MODE(bRm))
8369 {
8370 /* register target */
8371 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8373 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8374 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8375 } IEM_MC_ELSE() {
8376 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8377 } IEM_MC_ENDIF();
8378 IEM_MC_ADVANCE_RIP_AND_FINISH();
8379 IEM_MC_END();
8380 }
8381 else
8382 {
8383 /* memory target */
8384 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8388 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8389 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8390 } IEM_MC_ELSE() {
8391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8392 } IEM_MC_ENDIF();
8393 IEM_MC_ADVANCE_RIP_AND_FINISH();
8394 IEM_MC_END();
8395 }
8396}
8397
8398
8399/**
8400 * @opcode 0x96
8401 * @opfltest cf,zf
8402 */
8403FNIEMOP_DEF(iemOp_setbe_Eb)
8404{
8405 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8406 IEMOP_HLP_MIN_386();
8407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8408
8409 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8410 * any way. AMD says it's "unused", whatever that means. We're
8411 * ignoring for now. */
8412 if (IEM_IS_MODRM_REG_MODE(bRm))
8413 {
8414 /* register target */
8415 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8417 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8418 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8419 } IEM_MC_ELSE() {
8420 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8421 } IEM_MC_ENDIF();
8422 IEM_MC_ADVANCE_RIP_AND_FINISH();
8423 IEM_MC_END();
8424 }
8425 else
8426 {
8427 /* memory target */
8428 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8432 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8433 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8434 } IEM_MC_ELSE() {
8435 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8436 } IEM_MC_ENDIF();
8437 IEM_MC_ADVANCE_RIP_AND_FINISH();
8438 IEM_MC_END();
8439 }
8440}
8441
8442
8443/**
8444 * @opcode 0x97
8445 * @opfltest cf,zf
8446 */
8447FNIEMOP_DEF(iemOp_setnbe_Eb)
8448{
8449 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8450 IEMOP_HLP_MIN_386();
8451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8452
8453 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8454 * any way. AMD says it's "unused", whatever that means. We're
8455 * ignoring for now. */
8456 if (IEM_IS_MODRM_REG_MODE(bRm))
8457 {
8458 /* register target */
8459 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8461 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8462 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8463 } IEM_MC_ELSE() {
8464 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8465 } IEM_MC_ENDIF();
8466 IEM_MC_ADVANCE_RIP_AND_FINISH();
8467 IEM_MC_END();
8468 }
8469 else
8470 {
8471 /* memory target */
8472 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8476 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8477 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8478 } IEM_MC_ELSE() {
8479 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8480 } IEM_MC_ENDIF();
8481 IEM_MC_ADVANCE_RIP_AND_FINISH();
8482 IEM_MC_END();
8483 }
8484}
8485
8486
8487/**
8488 * @opcode 0x98
8489 * @opfltest sf
8490 */
8491FNIEMOP_DEF(iemOp_sets_Eb)
8492{
8493 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8494 IEMOP_HLP_MIN_386();
8495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8496
8497 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8498 * any way. AMD says it's "unused", whatever that means. We're
8499 * ignoring for now. */
8500 if (IEM_IS_MODRM_REG_MODE(bRm))
8501 {
8502 /* register target */
8503 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8506 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8507 } IEM_MC_ELSE() {
8508 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8509 } IEM_MC_ENDIF();
8510 IEM_MC_ADVANCE_RIP_AND_FINISH();
8511 IEM_MC_END();
8512 }
8513 else
8514 {
8515 /* memory target */
8516 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8520 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8521 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8522 } IEM_MC_ELSE() {
8523 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8524 } IEM_MC_ENDIF();
8525 IEM_MC_ADVANCE_RIP_AND_FINISH();
8526 IEM_MC_END();
8527 }
8528}
8529
8530
8531/**
8532 * @opcode 0x99
8533 * @opfltest sf
8534 */
8535FNIEMOP_DEF(iemOp_setns_Eb)
8536{
8537 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8538 IEMOP_HLP_MIN_386();
8539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8540
8541 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8542 * any way. AMD says it's "unused", whatever that means. We're
8543 * ignoring for now. */
8544 if (IEM_IS_MODRM_REG_MODE(bRm))
8545 {
8546 /* register target */
8547 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8549 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8550 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8551 } IEM_MC_ELSE() {
8552 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8553 } IEM_MC_ENDIF();
8554 IEM_MC_ADVANCE_RIP_AND_FINISH();
8555 IEM_MC_END();
8556 }
8557 else
8558 {
8559 /* memory target */
8560 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8565 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8566 } IEM_MC_ELSE() {
8567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8568 } IEM_MC_ENDIF();
8569 IEM_MC_ADVANCE_RIP_AND_FINISH();
8570 IEM_MC_END();
8571 }
8572}
8573
8574
8575/**
8576 * @opcode 0x9a
8577 * @opfltest pf
8578 */
8579FNIEMOP_DEF(iemOp_setp_Eb)
8580{
8581 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8582 IEMOP_HLP_MIN_386();
8583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8584
8585 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8586 * any way. AMD says it's "unused", whatever that means. We're
8587 * ignoring for now. */
8588 if (IEM_IS_MODRM_REG_MODE(bRm))
8589 {
8590 /* register target */
8591 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8593 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8594 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8595 } IEM_MC_ELSE() {
8596 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8597 } IEM_MC_ENDIF();
8598 IEM_MC_ADVANCE_RIP_AND_FINISH();
8599 IEM_MC_END();
8600 }
8601 else
8602 {
8603 /* memory target */
8604 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8610 } IEM_MC_ELSE() {
8611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8612 } IEM_MC_ENDIF();
8613 IEM_MC_ADVANCE_RIP_AND_FINISH();
8614 IEM_MC_END();
8615 }
8616}
8617
8618
8619/**
8620 * @opcode 0x9b
8621 * @opfltest pf
8622 */
8623FNIEMOP_DEF(iemOp_setnp_Eb)
8624{
8625 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8626 IEMOP_HLP_MIN_386();
8627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8628
8629 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8630 * any way. AMD says it's "unused", whatever that means. We're
8631 * ignoring for now. */
8632 if (IEM_IS_MODRM_REG_MODE(bRm))
8633 {
8634 /* register target */
8635 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8638 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8639 } IEM_MC_ELSE() {
8640 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8641 } IEM_MC_ENDIF();
8642 IEM_MC_ADVANCE_RIP_AND_FINISH();
8643 IEM_MC_END();
8644 }
8645 else
8646 {
8647 /* memory target */
8648 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8654 } IEM_MC_ELSE() {
8655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8656 } IEM_MC_ENDIF();
8657 IEM_MC_ADVANCE_RIP_AND_FINISH();
8658 IEM_MC_END();
8659 }
8660}
8661
8662
8663/**
8664 * @opcode 0x9c
8665 * @opfltest sf,of
8666 */
8667FNIEMOP_DEF(iemOp_setl_Eb)
8668{
8669 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8670 IEMOP_HLP_MIN_386();
8671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8672
8673 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8674 * any way. AMD says it's "unused", whatever that means. We're
8675 * ignoring for now. */
8676 if (IEM_IS_MODRM_REG_MODE(bRm))
8677 {
8678 /* register target */
8679 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8682 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8683 } IEM_MC_ELSE() {
8684 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8685 } IEM_MC_ENDIF();
8686 IEM_MC_ADVANCE_RIP_AND_FINISH();
8687 IEM_MC_END();
8688 }
8689 else
8690 {
8691 /* memory target */
8692 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8698 } IEM_MC_ELSE() {
8699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8700 } IEM_MC_ENDIF();
8701 IEM_MC_ADVANCE_RIP_AND_FINISH();
8702 IEM_MC_END();
8703 }
8704}
8705
8706
8707/**
8708 * @opcode 0x9d
8709 * @opfltest sf,of
8710 */
8711FNIEMOP_DEF(iemOp_setnl_Eb)
8712{
8713 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8714 IEMOP_HLP_MIN_386();
8715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8716
8717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8718 * any way. AMD says it's "unused", whatever that means. We're
8719 * ignoring for now. */
8720 if (IEM_IS_MODRM_REG_MODE(bRm))
8721 {
8722 /* register target */
8723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8725 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8726 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8727 } IEM_MC_ELSE() {
8728 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8729 } IEM_MC_ENDIF();
8730 IEM_MC_ADVANCE_RIP_AND_FINISH();
8731 IEM_MC_END();
8732 }
8733 else
8734 {
8735 /* memory target */
8736 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8742 } IEM_MC_ELSE() {
8743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8744 } IEM_MC_ENDIF();
8745 IEM_MC_ADVANCE_RIP_AND_FINISH();
8746 IEM_MC_END();
8747 }
8748}
8749
8750
8751/**
8752 * @opcode 0x9e
8753 * @opfltest zf,sf,of
8754 */
8755FNIEMOP_DEF(iemOp_setle_Eb)
8756{
8757 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8758 IEMOP_HLP_MIN_386();
8759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8760
8761 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8762 * any way. AMD says it's "unused", whatever that means. We're
8763 * ignoring for now. */
8764 if (IEM_IS_MODRM_REG_MODE(bRm))
8765 {
8766 /* register target */
8767 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8769 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8770 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8771 } IEM_MC_ELSE() {
8772 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8773 } IEM_MC_ENDIF();
8774 IEM_MC_ADVANCE_RIP_AND_FINISH();
8775 IEM_MC_END();
8776 }
8777 else
8778 {
8779 /* memory target */
8780 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8784 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8786 } IEM_MC_ELSE() {
8787 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8788 } IEM_MC_ENDIF();
8789 IEM_MC_ADVANCE_RIP_AND_FINISH();
8790 IEM_MC_END();
8791 }
8792}
8793
8794
8795/**
8796 * @opcode 0x9f
8797 * @opfltest zf,sf,of
8798 */
8799FNIEMOP_DEF(iemOp_setnle_Eb)
8800{
8801 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8802 IEMOP_HLP_MIN_386();
8803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8804
8805 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8806 * any way. AMD says it's "unused", whatever that means. We're
8807 * ignoring for now. */
8808 if (IEM_IS_MODRM_REG_MODE(bRm))
8809 {
8810 /* register target */
8811 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8813 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8814 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8815 } IEM_MC_ELSE() {
8816 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8817 } IEM_MC_ENDIF();
8818 IEM_MC_ADVANCE_RIP_AND_FINISH();
8819 IEM_MC_END();
8820 }
8821 else
8822 {
8823 /* memory target */
8824 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8828 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8829 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8830 } IEM_MC_ELSE() {
8831 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8832 } IEM_MC_ENDIF();
8833 IEM_MC_ADVANCE_RIP_AND_FINISH();
8834 IEM_MC_END();
8835 }
8836}
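
/*
 * Note: The three signed setcc forms above reduce to plain EFLAGS
 * predicates. A rough C sketch of the tests, assuming a local fEFlags
 * holding the guest EFLAGS (illustrative only, not part of the decoder):
 *     bool const fSfNeOf = !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
 *     bool const fSetNl  = !fSfNeOf;                            // 0f 9d
 *     bool const fSetLe  = (fEFlags & X86_EFL_ZF) || fSfNeOf;   // 0f 9e
 *     bool const fSetNle = !(fEFlags & X86_EFL_ZF) && !fSfNeOf; // 0f 9f
 */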
8837
8838
8839/** Opcode 0x0f 0xa0. */
8840FNIEMOP_DEF(iemOp_push_fs)
8841{
8842 IEMOP_MNEMONIC(push_fs, "push fs");
8843 IEMOP_HLP_MIN_386();
8844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8845 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8846}
8847
8848
8849/** Opcode 0x0f 0xa1. */
8850FNIEMOP_DEF(iemOp_pop_fs)
8851{
8852 IEMOP_MNEMONIC(pop_fs, "pop fs");
8853 IEMOP_HLP_MIN_386();
8854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8855 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8856 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8857 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8858 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8859 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8860 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8861 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8862 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8863}
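
/*
 * Note: The register mask handed to IEM_MC_DEFER_TO_CIMPL_2_RET above
 * tells the recompiler which guest registers the C implementation may
 * dirty: rSP (the pop adjusts the stack pointer) plus all four shadow
 * fields of the FS segment register (selector, base, limit, attributes).
 */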
8864
8865
8866/** Opcode 0x0f 0xa2. */
8867FNIEMOP_DEF(iemOp_cpuid)
8868{
8869 IEMOP_MNEMONIC(cpuid, "cpuid");
8870 IEMOP_HLP_MIN_486(); /* not all 486es. */
8871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8872 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8873 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8874 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8875 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8877 iemCImpl_cpuid);
8878}
8879
8880
8881/**
8882 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8883 * iemOp_bts_Ev_Gv.
8884 */
8885
8886#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8888 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8889 \
8890 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8891 { \
8892 /* register destination. */ \
8893 switch (pVCpu->iem.s.enmEffOpSize) \
8894 { \
8895 case IEMMODE_16BIT: \
8896 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8898 \
8899 IEM_MC_ARG(uint16_t, u16Src, 2); \
8900 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8901 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8902 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8903 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8904 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8905 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8906 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8907 \
8908 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8909 IEM_MC_END(); \
8910 break; \
8911 \
8912 case IEMMODE_32BIT: \
8913 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8915 \
8916 IEM_MC_ARG(uint32_t, u32Src, 2); \
8917 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8918 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8919 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8920 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8921 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8922 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8923 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8924 \
8925 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8926 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8927 IEM_MC_END(); \
8928 break; \
8929 \
8930 case IEMMODE_64BIT: \
8931 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8933 \
8934 IEM_MC_ARG(uint64_t, u64Src, 2); \
8935 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8936 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8937 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8938 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8939 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8940 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8941 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8942 \
8943 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8944 IEM_MC_END(); \
8945 break; \
8946 \
8947 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8948 } \
8949 } \
8950 else \
8951 { \
8952 /* memory destination. */ \
8953 /** @todo test negative bit offsets! */ \
8954 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8955 { \
8956 switch (pVCpu->iem.s.enmEffOpSize) \
8957 { \
8958 case IEMMODE_16BIT: \
8959 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8962 IEMOP_HLP_DONE_DECODING(); \
8963 \
8964 IEM_MC_ARG(uint16_t, u16Src, 2); \
8965 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8966 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8967 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8968 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8969 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8970 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8971 \
8972 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8973 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8974 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8975 \
8976 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8977 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8978 \
8979 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8980 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8981 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8982 IEM_MC_END(); \
8983 break; \
8984 \
8985 case IEMMODE_32BIT: \
8986 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8989 IEMOP_HLP_DONE_DECODING(); \
8990 \
8991 IEM_MC_ARG(uint32_t, u32Src, 2); \
8992 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8993 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8994 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8995 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8996 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8997 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8998 \
8999 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9000 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9001 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9002 \
9003 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9004 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9005 \
9006 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9007 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9008 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9009 IEM_MC_END(); \
9010 break; \
9011 \
9012 case IEMMODE_64BIT: \
9013 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9016 IEMOP_HLP_DONE_DECODING(); \
9017 \
9018 IEM_MC_ARG(uint64_t, u64Src, 2); \
9019 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9020 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9021 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9022 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9023 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9024 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9025 \
9026 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9027 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9028 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9029 \
9030 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9031 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9032 \
9033 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9034 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9035 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9036 IEM_MC_END(); \
9037 break; \
9038 \
9039 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9040 } \
9041 } \
9042 else \
9043 { \
9044 (void)0
9045/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9046#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9047 switch (pVCpu->iem.s.enmEffOpSize) \
9048 { \
9049 case IEMMODE_16BIT: \
9050 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9053 IEMOP_HLP_DONE_DECODING(); \
9054 \
9055 IEM_MC_ARG(uint16_t, u16Src, 2); \
9056 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9057 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9058 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9059 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9060 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9061 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9062 \
9063 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9064 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9065 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9066 \
9067 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9068 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9069 \
9070 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9071 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9072 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9073 IEM_MC_END(); \
9074 break; \
9075 \
9076 case IEMMODE_32BIT: \
9077 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9080 IEMOP_HLP_DONE_DECODING(); \
9081 \
9082 IEM_MC_ARG(uint32_t, u32Src, 2); \
9083 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9084 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9085 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9086 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9087 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9088 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9089 \
9090 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9091 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9092 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9093 \
9094 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9095 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9096 \
9097 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9098 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9099 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9100 IEM_MC_END(); \
9101 break; \
9102 \
9103 case IEMMODE_64BIT: \
9104 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9107 IEMOP_HLP_DONE_DECODING(); \
9108 \
9109 IEM_MC_ARG(uint64_t, u64Src, 2); \
9110 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9111 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9112 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9113 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9114 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9115 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9116 \
9117 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9118 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9119 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9120 \
9121 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9122 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9123 \
9124 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9125 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9126 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9127 IEM_MC_END(); \
9128 break; \
9129 \
9130 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9131 } \
9132 } \
9133 } \
9134 (void)0
9135
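/*
 * Note: For the memory forms the bit offset in the source register indexes
 * the whole bit string, so the effective address is first adjusted to the
 * aligned element containing the bit (SAR keeps negative offsets working).
 * A worked 16-bit example (illustrative only): a bit offset of 35 gives
 *     i16AddrAdj = (35 >> 4) << 1 = 4        (2 words = 4 bytes in)
 *     u16Src     =  35 & 0x0f     = 3        (bit within that word)
 * i.e. the worker operates on bit 3 of the word at GCPtrEffDst + 4.
 */
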
9136/* Read-only version (bt). */
9137#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9139 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9140 \
9141 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9142 { \
9143 /* register destination. */ \
9144 switch (pVCpu->iem.s.enmEffOpSize) \
9145 { \
9146 case IEMMODE_16BIT: \
9147 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9149 \
9150 IEM_MC_ARG(uint16_t, u16Src, 2); \
9151 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9152 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9153 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9154 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9155 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9156 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9157 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9158 \
9159 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9160 IEM_MC_END(); \
9161 break; \
9162 \
9163 case IEMMODE_32BIT: \
9164 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9166 \
9167 IEM_MC_ARG(uint32_t, u32Src, 2); \
9168 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9169 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9170 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9171 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9172 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9173 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9174 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9175 \
9176 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9177 IEM_MC_END(); \
9178 break; \
9179 \
9180 case IEMMODE_64BIT: \
9181 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9183 \
9184 IEM_MC_ARG(uint64_t, u64Src, 2); \
9185 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9186 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9187 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9188 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9189 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9190 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9191 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9192 \
9193 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9194 IEM_MC_END(); \
9195 break; \
9196 \
9197 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9198 } \
9199 } \
9200 else \
9201 { \
9202 /* memory destination. */ \
9203 /** @todo test negative bit offsets! */ \
9204 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9205 { \
9206 switch (pVCpu->iem.s.enmEffOpSize) \
9207 { \
9208 case IEMMODE_16BIT: \
9209 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9212 IEMOP_HLP_DONE_DECODING(); \
9213 \
9214 IEM_MC_ARG(uint16_t, u16Src, 2); \
9215 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9216 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9217 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9218 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9219 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9220 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9221 \
9222 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9223 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9224 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9225 \
9226 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9227 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9228 \
9229 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9230 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9231 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9232 IEM_MC_END(); \
9233 break; \
9234 \
9235 case IEMMODE_32BIT: \
9236 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9239 IEMOP_HLP_DONE_DECODING(); \
9240 \
9241 IEM_MC_ARG(uint32_t, u32Src, 2); \
9242 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9243 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9244 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9245 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9246 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9247 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9248 \
9249 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9250 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9251 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9252 \
9253 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9254 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9255 \
9256 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9257 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9258 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9259 IEM_MC_END(); \
9260 break; \
9261 \
9262 case IEMMODE_64BIT: \
9263 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9266 IEMOP_HLP_DONE_DECODING(); \
9267 \
9268 IEM_MC_ARG(uint64_t, u64Src, 2); \
9269 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9270 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9271 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9272 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9273 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9274 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9275 \
9276 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9277 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9278 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9279 \
9280 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9281 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9282 \
9283 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9284 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9286 IEM_MC_END(); \
9287 break; \
9288 \
9289 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9290 } \
9291 } \
9292 else \
9293 { \
9294 IEMOP_HLP_DONE_DECODING(); \
9295 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9296 } \
9297 } \
9298 (void)0
9299
9300
9301/**
9302 * @opcode 0xa3
9303 * @oppfx n/a
9304 * @opflclass bitmap
9305 */
9306FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9307{
9308 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9309 IEMOP_HLP_MIN_386();
9310 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9311}
9312
9313
9314/**
9315 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9316 */
9317#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
9318 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9319 \
9320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9322 \
9323 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9324 { \
9325 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9326 \
9327 switch (pVCpu->iem.s.enmEffOpSize) \
9328 { \
9329 case IEMMODE_16BIT: \
9330 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9332 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9333 IEM_MC_ARG(uint16_t, u16Src, 1); \
9334 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9335 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9336 \
9337 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9338 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9339 IEM_MC_REF_EFLAGS(pEFlags); \
9340 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9341 \
9342 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9343 IEM_MC_END(); \
9344 break; \
9345 \
9346 case IEMMODE_32BIT: \
9347 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9349 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9350 IEM_MC_ARG(uint32_t, u32Src, 1); \
9351 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9352 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9353 \
9354 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9355 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9356 IEM_MC_REF_EFLAGS(pEFlags); \
9357 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9358 \
9359 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9360 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9361 IEM_MC_END(); \
9362 break; \
9363 \
9364 case IEMMODE_64BIT: \
9365 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9367 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9368 IEM_MC_ARG(uint64_t, u64Src, 1); \
9369 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9370 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9371 \
9372 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9373 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9374 IEM_MC_REF_EFLAGS(pEFlags); \
9375 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9376 \
9377 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9378 IEM_MC_END(); \
9379 break; \
9380 \
9381 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9382 } \
9383 } \
9384 else \
9385 { \
9386 switch (pVCpu->iem.s.enmEffOpSize) \
9387 { \
9388 case IEMMODE_16BIT: \
9389 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9392 \
9393 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9395 \
9396 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9397 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9398 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9399 \
9400 IEM_MC_ARG(uint16_t, u16Src, 1); \
9401 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9402 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9403 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9404 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9405 \
9406 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9407 IEM_MC_COMMIT_EFLAGS(EFlags); \
9408 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9409 IEM_MC_END(); \
9410 break; \
9411 \
9412 case IEMMODE_32BIT: \
9413 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9416 \
9417 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9419 \
9420 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9421 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9422 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9423 \
9424 IEM_MC_ARG(uint32_t, u32Src, 1); \
9425 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9426 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9427 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9428 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9429 \
9430 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9431 IEM_MC_COMMIT_EFLAGS(EFlags); \
9432 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9433 IEM_MC_END(); \
9434 break; \
9435 \
9436 case IEMMODE_64BIT: \
9437 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9440 \
9441 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9443 \
9444 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9445 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9446 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9447 \
9448 IEM_MC_ARG(uint64_t, u64Src, 1); \
9449 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9450 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9451 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9452 \
9453 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9454 \
9455 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9456 IEM_MC_COMMIT_EFLAGS(EFlags); \
9457 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9458 IEM_MC_END(); \
9459 break; \
9460 \
9461 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9462 } \
9463 } (void)0
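
/*
 * Note: The pfnNormalUxx workers implement the SDM double precision shift
 * semantics; roughly, for shld with 0 < cShift < width:
 *     *puDst = (*puDst << cShift) | (uSrc >> (width - cShift));
 * and for shrd:
 *     *puDst = (*puDst >> cShift) | (uSrc << (width - cShift));
 * (Illustrative only; the real workers also compute CF/OF/SF/ZF/PF and
 * deal with the count masking corner cases.)
 */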
9464
9465
9466/**
9467 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9468 */
9469#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9470 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9471 \
9472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9473 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9474 \
9475 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9476 { \
9477 switch (pVCpu->iem.s.enmEffOpSize) \
9478 { \
9479 case IEMMODE_16BIT: \
9480 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9482 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9483 IEM_MC_ARG(uint16_t, u16Src, 1); \
9484 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9485 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9486 \
9487 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9488 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9489 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9490 IEM_MC_REF_EFLAGS(pEFlags); \
9491 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9492 \
9493 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9494 IEM_MC_END(); \
9495 break; \
9496 \
9497 case IEMMODE_32BIT: \
9498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9500 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9501 IEM_MC_ARG(uint32_t, u32Src, 1); \
9502 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9503 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9504 \
9505 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9506 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9507 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9508 IEM_MC_REF_EFLAGS(pEFlags); \
9509 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9510 \
9511 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9512 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9513 IEM_MC_END(); \
9514 break; \
9515 \
9516 case IEMMODE_64BIT: \
9517 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9519 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9520 IEM_MC_ARG(uint64_t, u64Src, 1); \
9521 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9522 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9523 \
9524 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9525 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9526 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9527 IEM_MC_REF_EFLAGS(pEFlags); \
9528 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9529 \
9530 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9531 IEM_MC_END(); \
9532 break; \
9533 \
9534 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9535 } \
9536 } \
9537 else \
9538 { \
9539 switch (pVCpu->iem.s.enmEffOpSize) \
9540 { \
9541 case IEMMODE_16BIT: \
9542 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9543 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9544 IEM_MC_ARG(uint16_t, u16Src, 1); \
9545 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9547 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9548 \
9549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9551 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9552 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9553 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9554 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9555 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9556 \
9557 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9558 IEM_MC_COMMIT_EFLAGS(EFlags); \
9559 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9560 IEM_MC_END(); \
9561 break; \
9562 \
9563 case IEMMODE_32BIT: \
9564 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9565 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9566 IEM_MC_ARG(uint32_t, u32Src, 1); \
9567 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9569 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9570 \
9571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9573 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9574 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9575 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9576 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9577 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9578 \
9579 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9580 IEM_MC_COMMIT_EFLAGS(EFlags); \
9581 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9582 IEM_MC_END(); \
9583 break; \
9584 \
9585 case IEMMODE_64BIT: \
9586 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9587 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9588 IEM_MC_ARG(uint64_t, u64Src, 1); \
9589 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9591 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9592 \
9593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9595 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9596 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9597 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9598 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9600 \
9601 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9602 IEM_MC_COMMIT_EFLAGS(EFlags); \
9603 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9604 IEM_MC_END(); \
9605 break; \
9606 \
9607 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9608 } \
9609 } (void)0
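
/*
 * Note: In the CL form the count is fetched from CL at runtime; per the
 * SDM it is masked by the workers to count mod 32 for 16/32-bit operands
 * and count mod 64 for 64-bit operands, and a 16-bit destination with a
 * masked count above 15 yields an undefined result on real hardware.
 */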
9610
9611
9612/**
9613 * @opcode 0xa4
9614 * @opflclass shift_count
9615 */
9616FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9617{
9618 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9619 IEMOP_HLP_MIN_386();
9620 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9621}
9622
9623
9624/**
9625 * @opcode 0xa5
9626 * @opflclass shift_count
9627 */
9628FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9629{
9630 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9631 IEMOP_HLP_MIN_386();
9632 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9633}
9634
9635
9636/** Opcode 0x0f 0xa8. */
9637FNIEMOP_DEF(iemOp_push_gs)
9638{
9639 IEMOP_MNEMONIC(push_gs, "push gs");
9640 IEMOP_HLP_MIN_386();
9641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9642 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9643}
9644
9645
9646/** Opcode 0x0f 0xa9. */
9647FNIEMOP_DEF(iemOp_pop_gs)
9648{
9649 IEMOP_MNEMONIC(pop_gs, "pop gs");
9650 IEMOP_HLP_MIN_386();
9651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9653 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9654 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9655 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9656 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9657 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9658 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9659 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9660}
9661
9662
9663/** Opcode 0x0f 0xaa. */
9664FNIEMOP_DEF(iemOp_rsm)
9665{
9666 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9667 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9669 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9670 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9671 iemCImpl_rsm);
9672}
9673
9674
9675
9676/**
9677 * @opcode 0xab
9678 * @oppfx n/a
9679 * @opflclass bitmap
9680 */
9681FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9682{
9683 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9684 IEMOP_HLP_MIN_386();
9685 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9686 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9687}
9688
9689
9690/**
9691 * @opcode 0xac
9692 * @opflclass shift_count
9693 */
9694FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9695{
9696 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9697 IEMOP_HLP_MIN_386();
9698 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9699}
9700
9701
9702/**
9703 * @opcode 0xad
9704 * @opflclass shift_count
9705 */
9706FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9707{
9708 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9709 IEMOP_HLP_MIN_386();
9710 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9711}
9712
9713
9714/** Opcode 0x0f 0xae mem/0. */
9715FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9716{
9717 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9718 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9719 IEMOP_RAISE_INVALID_OPCODE_RET();
9720
9721 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9722 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9726 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9727 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9728 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9729 IEM_MC_END();
9730}
9731
9732
9733/** Opcode 0x0f 0xae mem/1. */
9734FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9735{
9736 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9738 IEMOP_RAISE_INVALID_OPCODE_RET();
9739
9740 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9741 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9744 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9745 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9746 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9747 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9748 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9749 IEM_MC_END();
9750}
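
/*
 * Note: fxsave only reads the guest FPU/SSE state, hence the
 * IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ above, whereas fxrstor (and xrstor
 * further down) replace that state and must actualize it for change so a
 * stale host register copy cannot overwrite the restored values. The
 * enmEffOpSize argument lets the C code tell the REX.W forms
 * (fxsave64/fxrstor64) apart from the 32-bit layout.
 */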
9751
9752
9753/**
9754 * @opmaps grp15
9755 * @opcode !11/2
9756 * @oppfx none
9757 * @opcpuid sse
9758 * @opgroup og_sse_mxcsrsm
9759 * @opxcpttype 5
9760 * @optest op1=0 -> mxcsr=0
9761 * @optest op1=0x2083 -> mxcsr=0x2083
9762 * @optest op1=0xfffffffe -> value.xcpt=0xd
9763 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9764 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9765 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9766 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9767 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9768 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9769 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9770 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9771 */
9772FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9773{
9774 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9776 IEMOP_RAISE_INVALID_OPCODE_RET();
9777
9778 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9779 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9782 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9783 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9784 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9785 IEM_MC_END();
9786}
9787
9788
9789/**
9790 * @opmaps grp15
9791 * @opcode !11/3
9792 * @oppfx none
9793 * @opcpuid sse
9794 * @opgroup og_sse_mxcsrsm
9795 * @opxcpttype 5
9796 * @optest mxcsr=0 -> op1=0
9797 * @optest mxcsr=0x2083 -> op1=0x2083
9798 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9799 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9800 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9801 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9802 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9803 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9804 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9805 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9806 */
9807FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9808{
9809 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9810 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9811 IEMOP_RAISE_INVALID_OPCODE_RET();
9812
9813 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9814 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9817 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9818 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9819 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9820 IEM_MC_END();
9821}
9822
9823
9824/**
9825 * @opmaps grp15
9826 * @opcode !11/4
9827 * @oppfx none
9828 * @opcpuid xsave
9829 * @opgroup og_system
9830 * @opxcpttype none
9831 */
9832FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9833{
9834 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9835 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9836 IEMOP_RAISE_INVALID_OPCODE_RET();
9837
9838 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9839 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9842 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9843 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9844 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9845 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9846 IEM_MC_END();
9847}
9848
9849
9850/**
9851 * @opmaps grp15
9852 * @opcode !11/5
9853 * @oppfx none
9854 * @opcpuid xsave
9855 * @opgroup og_system
9856 * @opxcpttype none
9857 */
9858FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9859{
9860 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9862 IEMOP_RAISE_INVALID_OPCODE_RET();
9863
9864 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9865 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9869 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9870 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9871 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw) | RT_BIT_64(kIemNativeGstReg_MxCsr),
9872 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9873 IEM_MC_END();
9874}
9875
9876/** Opcode 0x0f 0xae mem/6. */
9877FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9878
9879/**
9880 * @opmaps grp15
9881 * @opcode !11/7
9882 * @oppfx none
9883 * @opcpuid clfsh
9884 * @opgroup og_cachectl
9885 * @optest op1=1 ->
9886 */
9887FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9888{
9889 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9890 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9891 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9892
9893 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9894 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9897 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9898 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9899 IEM_MC_END();
9900}
9901
9902/**
9903 * @opmaps grp15
9904 * @opcode !11/7
9905 * @oppfx 0x66
9906 * @opcpuid clflushopt
9907 * @opgroup og_cachectl
9908 * @optest op1=1 ->
9909 */
9910FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9911{
9912 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9913 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9914 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9915
9916 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9917 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9920 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9921 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9922 IEM_MC_END();
9923}
9924
9925
9926/** Opcode 0x0f 0xae 11b/5. */
9927FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9928{
9929 RT_NOREF_PV(bRm);
9930 IEMOP_MNEMONIC(lfence, "lfence");
9931 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9933#ifdef RT_ARCH_ARM64
9934 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9935#else
9936 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9937 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9938 else
9939 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9940#endif
9941 IEM_MC_ADVANCE_RIP_AND_FINISH();
9942 IEM_MC_END();
9943}
9944
9945
9946/** Opcode 0x0f 0xae 11b/6. */
9947FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9948{
9949 RT_NOREF_PV(bRm);
9950 IEMOP_MNEMONIC(mfence, "mfence");
9951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9953#ifdef RT_ARCH_ARM64
9954 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9955#else
9956 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9957 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9958 else
9959 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9960#endif
9961 IEM_MC_ADVANCE_RIP_AND_FINISH();
9962 IEM_MC_END();
9963}
9964
9965
9966/** Opcode 0x0f 0xae 11b/7. */
9967FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9968{
9969 RT_NOREF_PV(bRm);
9970 IEMOP_MNEMONIC(sfence, "sfence");
9971 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9973#ifdef RT_ARCH_ARM64
9974 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9975#else
9976 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9977 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9978 else
9979 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9980#endif
9981 IEM_MC_ADVANCE_RIP_AND_FINISH();
9982 IEM_MC_END();
9983}
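
/*
 * Note: On x86 hosts without SSE2 the native lfence/mfence/sfence
 * instructions are unavailable, so the three workers above fall back to
 * iemAImpl_alt_mem_fence, which presumably provides equivalent ordering
 * by other means (e.g. a locked read-modify-write on a dummy variable).
 */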
9984
9985
9986/** Opcode 0xf3 0x0f 0xae 11b/0. */
9987FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9988{
9989 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9990 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9991 {
9992 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9994 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9995 IEM_MC_LOCAL(uint64_t, u64Dst);
9996 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9997 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9998 IEM_MC_ADVANCE_RIP_AND_FINISH();
9999 IEM_MC_END();
10000 }
10001 else
10002 {
10003 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10005 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10006 IEM_MC_LOCAL(uint32_t, u32Dst);
10007 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10008 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10009 IEM_MC_ADVANCE_RIP_AND_FINISH();
10010 IEM_MC_END();
10011 }
10012}
10013
10014
10015/** Opcode 0xf3 0x0f 0xae 11b/1. */
10016FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10017{
10018 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10019 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10020 {
10021 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10023 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10024 IEM_MC_LOCAL(uint64_t, u64Dst);
10025 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10026 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10027 IEM_MC_ADVANCE_RIP_AND_FINISH();
10028 IEM_MC_END();
10029 }
10030 else
10031 {
10032 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10034 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10035 IEM_MC_LOCAL(uint32_t, u32Dst);
10036 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10037 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10038 IEM_MC_ADVANCE_RIP_AND_FINISH();
10039 IEM_MC_END();
10040 }
10041}
10042
10043
10044/** Opcode 0xf3 0x0f 0xae 11b/2. */
10045FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10046{
10047 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10048 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10049 {
10050 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10052 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10053 IEM_MC_LOCAL(uint64_t, u64Dst);
10054 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10055 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10056 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10057 IEM_MC_ADVANCE_RIP_AND_FINISH();
10058 IEM_MC_END();
10059 }
10060 else
10061 {
10062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10064 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10065 IEM_MC_LOCAL(uint32_t, u32Dst);
10066 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10067 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10068 IEM_MC_ADVANCE_RIP_AND_FINISH();
10069 IEM_MC_END();
10070 }
10071}
10072
10073
10074/** Opcode 0xf3 0x0f 0xae 11b/3. */
10075FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10076{
10077 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10079 {
10080 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10082 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10083 IEM_MC_LOCAL(uint64_t, u64Dst);
10084 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10085 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10086 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10087 IEM_MC_ADVANCE_RIP_AND_FINISH();
10088 IEM_MC_END();
10089 }
10090 else
10091 {
10092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10094 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10095 IEM_MC_LOCAL(uint32_t, u32Dst);
10096 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10097 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10098 IEM_MC_ADVANCE_RIP_AND_FINISH();
10099 IEM_MC_END();
10100 }
10101}
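
/*
 * Note: With a non-64-bit operand size the wrfsbase/wrgsbase paths above
 * store the fetched 32-bit value via IEM_MC_STORE_SREG_BASE_U64, i.e. the
 * base is zero extended; the canonical address check only applies to (and
 * is only performed for) the 64-bit form.
 */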
10102
10103
10104/**
10105 * Group 15 jump table for register variant.
10106 */
10107IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10108{ /* pfx: none, 066h, 0f3h, 0f2h */
10109 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10110 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10111 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10112 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10113 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10114 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10115 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10116 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10117};
10118AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10119
10120
10121/**
10122 * Group 15 jump table for memory variant.
10123 */
10124IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10125{ /* pfx: none, 066h, 0f3h, 0f2h */
10126 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10127 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10128 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10130 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10131 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10132 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10134};
10135AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10136
10137
10138/** Opcode 0x0f 0xae. */
10139FNIEMOP_DEF(iemOp_Grp15)
10140{
10141 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10143 if (IEM_IS_MODRM_REG_MODE(bRm))
10144 /* register, register */
10145 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10146 + pVCpu->iem.s.idxPrefix], bRm);
10147 /* memory, register */
10148 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10149 + pVCpu->iem.s.idxPrefix], bRm);
10150}
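
/*
 * Note: Both tables are indexed by /reg * 4 + mandatory prefix index
 * (none, 066h, 0f3h, 0f2h). For example, f3 0f ae c0 decodes with
 * mod=11b, /0 and the 0f3h prefix, selecting
 * g_apfnGroup15RegReg[0 * 4 + 2], i.e. iemOp_Grp15_rdfsbase.
 */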
10151
10152
10153/**
10154 * @opcode 0xaf
10155 * @opflclass multiply
10156 */
10157FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10158{
10159 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10160 IEMOP_HLP_MIN_386();
10161 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10162 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10164 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10165}
10166
10167
10168/**
10169 * @opcode 0xb0
10170 * @opflclass arithmetic
10171 */
10172FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10173{
10174 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10175 IEMOP_HLP_MIN_486();
10176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10177
10178 if (IEM_IS_MODRM_REG_MODE(bRm))
10179 {
10180 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10182 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10183 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10184 IEM_MC_ARG(uint8_t, u8Src, 2);
10185 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10186
10187 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10188 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10189 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10190 IEM_MC_REF_EFLAGS(pEFlags);
10191 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10192
10193 IEM_MC_ADVANCE_RIP_AND_FINISH();
10194 IEM_MC_END();
10195 }
10196 else
10197 {
10198#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10199 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10202 IEMOP_HLP_DONE_DECODING(); \
10203 \
10204 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10205 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10206 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10207 \
10208 IEM_MC_ARG(uint8_t, u8Src, 2); \
10209 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10210 \
10211 IEM_MC_LOCAL(uint8_t, u8Al); \
10212 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10213 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10214 \
10215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10216 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10217 \
10218 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10219 IEM_MC_COMMIT_EFLAGS(EFlags); \
10220 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10221 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10222 IEM_MC_END()
10223
10224 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10225 {
10226 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10227 }
10228 else
10229 {
10230 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10231 }
10232 }
10233}
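
/*
 * Note: The cmpxchg workers implement the usual compare-and-swap; a rough
 * C sketch for the byte case (illustrative only, the workers also update
 * the arithmetic flags like a CMP of AL with the destination would):
 *     if (*pu8Dst == *pu8Al) { fEFlags |= X86_EFL_ZF;  *pu8Dst = u8Src;   }
 *     else                   { fEFlags &= ~X86_EFL_ZF; *pu8Al  = *pu8Dst; }
 * This is why the memory variant can store u8Al back into AL
 * unconditionally: on success it is simply written back unchanged.
 */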
10234
10235/**
10236 * @opcode 0xb1
10237 * @opflclass arithmetic
10238 */
10239FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10240{
10241 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10242 IEMOP_HLP_MIN_486();
10243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10244
10245 if (IEM_IS_MODRM_REG_MODE(bRm))
10246 {
10247 switch (pVCpu->iem.s.enmEffOpSize)
10248 {
10249 case IEMMODE_16BIT:
10250 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10252 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10253 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10254 IEM_MC_ARG(uint16_t, u16Src, 2);
10255 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10256
10257 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10258 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10259 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10260 IEM_MC_REF_EFLAGS(pEFlags);
10261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10262
10263 IEM_MC_ADVANCE_RIP_AND_FINISH();
10264 IEM_MC_END();
10265 break;
10266
10267 case IEMMODE_32BIT:
10268 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10271 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10272 IEM_MC_ARG(uint32_t, u32Src, 2);
10273 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10274
10275 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10276 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10277 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10278 IEM_MC_REF_EFLAGS(pEFlags);
10279 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10280
10281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10282 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10283 } IEM_MC_ELSE() {
10284 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10285 } IEM_MC_ENDIF();
10286
10287 IEM_MC_ADVANCE_RIP_AND_FINISH();
10288 IEM_MC_END();
10289 break;
10290
10291 case IEMMODE_64BIT:
10292 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10294 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10295 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10296 IEM_MC_ARG(uint64_t, u64Src, 2);
10297 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10298
10299 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10300 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10301 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10302 IEM_MC_REF_EFLAGS(pEFlags);
10303 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10304
10305 IEM_MC_ADVANCE_RIP_AND_FINISH();
10306 IEM_MC_END();
10307 break;
10308
10309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10310 }
10311 }
10312 else
10313 {
10314#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10315 do { \
10316 switch (pVCpu->iem.s.enmEffOpSize) \
10317 { \
10318 case IEMMODE_16BIT: \
10319 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10320 \
10321 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10324 IEMOP_HLP_DONE_DECODING(); \
10325 \
10326 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10327 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10328 \
10329 IEM_MC_ARG(uint16_t, u16Src, 2); \
10330 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10331 \
10332 IEM_MC_LOCAL(uint16_t, u16Ax); \
10333 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10334 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10335 \
10336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10337 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10338 \
10339 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10340 IEM_MC_COMMIT_EFLAGS(EFlags); \
10341 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10342 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10343 IEM_MC_END(); \
10344 break; \
10345 \
10346 case IEMMODE_32BIT: \
10347 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10350 IEMOP_HLP_DONE_DECODING(); \
10351 \
10352 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10353 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10354 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10355 \
10356 IEM_MC_ARG(uint32_t, u32Src, 2); \
10357 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10358 \
10359 IEM_MC_LOCAL(uint32_t, u32Eax); \
10360 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10361 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10362 \
10363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10364 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10365 \
10366 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10367 IEM_MC_COMMIT_EFLAGS(EFlags); \
10368 \
10369 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10370 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10371 } IEM_MC_ENDIF(); \
10372 \
10373 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10374 IEM_MC_END(); \
10375 break; \
10376 \
10377 case IEMMODE_64BIT: \
10378 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10381 IEMOP_HLP_DONE_DECODING(); \
10382 \
10383 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10384 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10385 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10386 \
10387 IEM_MC_ARG(uint64_t, u64Src, 2); \
10388 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10389 \
10390 IEM_MC_LOCAL(uint64_t, u64Rax); \
10391 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10392 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10393 \
10394 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10395 \
10396 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10397 \
10398 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10399 IEM_MC_COMMIT_EFLAGS(EFlags); \
10400 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10401 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10402 IEM_MC_END(); \
10403 break; \
10404 \
10405 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10406 } \
10407 } while (0)
10408
10409 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10410 {
10411 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10412 }
10413 else
10414 {
10415 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10416 }
10417 }
10418}
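

/*
 * Illustrative sketch (ad hoc names, not part of the emulation): the core
 * semantics the cmpxchg workers above implement, in plain C and ignoring
 * atomicity.  The workers also set the other arithmetic flags as a CMP
 * would; only ZF is shown here.
 */
#if 0
static void cmpxchg64Sketch(uint64_t *puDst, uint64_t *puRax, uint64_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puRax)
    {
        *pfEFlags |= X86_EFL_ZF;    /* Equal: ZF=1 and the source is stored to the destination. */
        *puDst     = uSrc;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;   /* Not equal: ZF=0 and the accumulator is loaded from the destination. */
        *puRax     = *puDst;
    }
}
#endif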
10419
10420
10421/** Opcode 0x0f 0xb2. */
10422FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10423{
10424 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10425 IEMOP_HLP_MIN_386();
10426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10427 if (IEM_IS_MODRM_REG_MODE(bRm))
10428 IEMOP_RAISE_INVALID_OPCODE_RET();
10429 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10430}
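

/*
 * Illustrative sketch (assumption about the memory layout only): the Mp
 * operand of lss (and of lfs/lgs below) is a far pointer, i.e. an offset
 * of the effective operand size followed by a 16-bit selector.  For the
 * 32-bit operand size that is:
 */
#if 0
#pragma pack(1)
typedef struct FARPTR32SKETCH
{
    uint32_t off;   /* Loaded into the general register (Gv). */
    uint16_t sel;   /* Loaded into the segment register (SS here). */
} FARPTR32SKETCH;
#pragma pack()
#endif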
10431
10432
10433/**
10434 * @opcode 0xb3
10435 * @oppfx n/a
10436 * @opflclass bitmap
10437 */
10438FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10439{
10440 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10441 IEMOP_HLP_MIN_386();
10442 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10443 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10444}
10445
10446
10447/** Opcode 0x0f 0xb4. */
10448FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10449{
10450 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10451 IEMOP_HLP_MIN_386();
10452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10453 if (IEM_IS_MODRM_REG_MODE(bRm))
10454 IEMOP_RAISE_INVALID_OPCODE_RET();
10455 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10456}
10457
10458
10459/** Opcode 0x0f 0xb5. */
10460FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10461{
10462 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10463 IEMOP_HLP_MIN_386();
10464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10465 if (IEM_IS_MODRM_REG_MODE(bRm))
10466 IEMOP_RAISE_INVALID_OPCODE_RET();
10467 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10468}
10469
10470
10471/** Opcode 0x0f 0xb6. */
10472FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10473{
10474 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10475 IEMOP_HLP_MIN_386();
10476
10477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10478
10479 /*
10480 * If rm is denoting a register, no more instruction bytes.
10481 */
10482 if (IEM_IS_MODRM_REG_MODE(bRm))
10483 {
10484 switch (pVCpu->iem.s.enmEffOpSize)
10485 {
10486 case IEMMODE_16BIT:
10487 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10489 IEM_MC_LOCAL(uint16_t, u16Value);
10490 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10491 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10492 IEM_MC_ADVANCE_RIP_AND_FINISH();
10493 IEM_MC_END();
10494 break;
10495
10496 case IEMMODE_32BIT:
10497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10499 IEM_MC_LOCAL(uint32_t, u32Value);
10500 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10501 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10502 IEM_MC_ADVANCE_RIP_AND_FINISH();
10503 IEM_MC_END();
10504 break;
10505
10506 case IEMMODE_64BIT:
10507 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10509 IEM_MC_LOCAL(uint64_t, u64Value);
10510 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10511 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10512 IEM_MC_ADVANCE_RIP_AND_FINISH();
10513 IEM_MC_END();
10514 break;
10515
10516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10517 }
10518 }
10519 else
10520 {
10521 /*
10522 * We're loading a register from memory.
10523 */
10524 switch (pVCpu->iem.s.enmEffOpSize)
10525 {
10526 case IEMMODE_16BIT:
10527 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10528 IEM_MC_LOCAL(uint16_t, u16Value);
10529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10532 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10533 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10534 IEM_MC_ADVANCE_RIP_AND_FINISH();
10535 IEM_MC_END();
10536 break;
10537
10538 case IEMMODE_32BIT:
10539 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10540 IEM_MC_LOCAL(uint32_t, u32Value);
10541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10544 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10545 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10546 IEM_MC_ADVANCE_RIP_AND_FINISH();
10547 IEM_MC_END();
10548 break;
10549
10550 case IEMMODE_64BIT:
10551 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10552 IEM_MC_LOCAL(uint64_t, u64Value);
10553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10556 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10557 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10558 IEM_MC_ADVANCE_RIP_AND_FINISH();
10559 IEM_MC_END();
10560 break;
10561
10562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10563 }
10564 }
10565}
10566
10567
10568/** Opcode 0x0f 0xb7. */
10569FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10570{
10571 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10572 IEMOP_HLP_MIN_386();
10573
10574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10575
10576 /** @todo Not entirely sure how the operand size prefix is handled here,
10577 * assuming that it will be ignored. Would be nice to have a few
10578 * tests for this. */
10579
10580 /** @todo There should be no difference in the behaviour whether REX.W is
10581 * present or not... */
10582
10583 /*
10584 * If rm is denoting a register, no more instruction bytes.
10585 */
10586 if (IEM_IS_MODRM_REG_MODE(bRm))
10587 {
10588 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10589 {
10590 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10592 IEM_MC_LOCAL(uint32_t, u32Value);
10593 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10594 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10595 IEM_MC_ADVANCE_RIP_AND_FINISH();
10596 IEM_MC_END();
10597 }
10598 else
10599 {
10600 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10602 IEM_MC_LOCAL(uint64_t, u64Value);
10603 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10604 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10605 IEM_MC_ADVANCE_RIP_AND_FINISH();
10606 IEM_MC_END();
10607 }
10608 }
10609 else
10610 {
10611 /*
10612 * We're loading a register from memory.
10613 */
10614 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10615 {
10616 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10617 IEM_MC_LOCAL(uint32_t, u32Value);
10618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10621 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10622 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10623 IEM_MC_ADVANCE_RIP_AND_FINISH();
10624 IEM_MC_END();
10625 }
10626 else
10627 {
10628 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10629 IEM_MC_LOCAL(uint64_t, u64Value);
10630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10633 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10634 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10635 IEM_MC_ADVANCE_RIP_AND_FINISH();
10636 IEM_MC_END();
10637 }
10638 }
10639}
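

/*
 * Illustrative sketch (ad hoc names): what the movzx forms above compute,
 * and how the movsx forms (0x0f 0xbe/0xbf below) differ: zero versus sign
 * extension of the narrow source value.
 */
#if 0
static uint64_t movzxSketchU16(uint16_t uSrc) { return uSrc; } /* High bits become zero. */
static int64_t  movsxSketchU16(int16_t  iSrc) { return iSrc; } /* High bits copy the sign bit. */
#endif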
10640
10641
10642/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10643FNIEMOP_UD_STUB(iemOp_jmpe);
10644
10645
10646/**
10647 * @opcode 0xb8
10648 * @oppfx 0xf3
10649 * @opflmodify cf,pf,af,zf,sf,of
10650 * @opflclear cf,pf,af,sf,of
10651 */
10652FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10653{
10654 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10655 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10656 return iemOp_InvalidNeedRM(pVCpu);
10657#ifndef TST_IEM_CHECK_MC
10658# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10659 static const IEMOPBINSIZES s_Native =
10660 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10661# endif
10662 static const IEMOPBINSIZES s_Fallback =
10663 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10664#endif
10665 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10667 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10668}
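

/*
 * Illustrative sketch (assumption, not the actual fallback code): one
 * portable way a popcnt fallback can count bits when the host lacks the
 * POPCNT instruction.  POPCNT also clears CF/PF/AF/SF/OF and sets ZF for
 * a zero result; the flag handling is omitted here.
 */
#if 0
static unsigned popcntSketchU32(uint32_t uVal)
{
    unsigned cBits = 0;
    while (uVal)
    {
        uVal &= uVal - 1;   /* Kernighan: clears the lowest set bit. */
        cBits++;
    }
    return cBits;
}
#endif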
10669
10670
10671/**
10672 * @opcode 0xb9
10673 * @opinvalid intel-modrm
10674 * @optest ->
10675 */
10676FNIEMOP_DEF(iemOp_Grp10)
10677{
10678 /*
10679 * AMD does not decode beyond the 0xb9 opcode, whereas intel decodes the
10680 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10681 */
10682 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10683 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10684 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10685}
10686
10687
10688/**
10689 * Body for group 8 bit instruction.
10690 */
10691#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10692 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10693 \
10694 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10695 { \
10696 /* register destination. */ \
10697 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10698 \
10699 switch (pVCpu->iem.s.enmEffOpSize) \
10700 { \
10701 case IEMMODE_16BIT: \
10702 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10704 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10705 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10706 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10707 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10708 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10709 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10710 \
10711 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10712 IEM_MC_END(); \
10713 break; \
10714 \
10715 case IEMMODE_32BIT: \
10716 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10718 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10719 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10720 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10721 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10722 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10723 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10724 \
10725 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10726 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10727 IEM_MC_END(); \
10728 break; \
10729 \
10730 case IEMMODE_64BIT: \
10731 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10733 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10734 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10735 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10736 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10737 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10738 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10739 \
10740 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10741 IEM_MC_END(); \
10742 break; \
10743 \
10744 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10745 } \
10746 } \
10747 else \
10748 { \
10749 /* memory destination. */ \
10750 /** @todo test negative bit offsets! */ \
10751 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10752 { \
10753 switch (pVCpu->iem.s.enmEffOpSize) \
10754 { \
10755 case IEMMODE_16BIT: \
10756 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10759 \
10760 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10761 IEMOP_HLP_DONE_DECODING(); \
10762 \
10763 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10764 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10765 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10766 \
10767 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10768 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10769 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10770 \
10771 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10772 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10773 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10774 IEM_MC_END(); \
10775 break; \
10776 \
10777 case IEMMODE_32BIT: \
10778 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10781 \
10782 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10783 IEMOP_HLP_DONE_DECODING(); \
10784 \
10785 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10786 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10787 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10788 \
10789 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10790 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10791 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10792 \
10793 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10794 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10795 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10796 IEM_MC_END(); \
10797 break; \
10798 \
10799 case IEMMODE_64BIT: \
10800 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10803 \
10804 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10805 IEMOP_HLP_DONE_DECODING(); \
10806 \
10807 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10808 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10809 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10810 \
10811 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10812 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10813 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10814 \
10815 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10816 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10818 IEM_MC_END(); \
10819 break; \
10820 \
10821 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10822 } \
10823 } \
10824 else \
10825 { \
10826 (void)0
10827/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10828#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10829 switch (pVCpu->iem.s.enmEffOpSize) \
10830 { \
10831 case IEMMODE_16BIT: \
10832 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10835 \
10836 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10837 IEMOP_HLP_DONE_DECODING(); \
10838 \
10839 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10840 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10841 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10842 \
10843 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10844 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10845 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10846 \
10847 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10848 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10849 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10850 IEM_MC_END(); \
10851 break; \
10852 \
10853 case IEMMODE_32BIT: \
10854 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10857 \
10858 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10859 IEMOP_HLP_DONE_DECODING(); \
10860 \
10861 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10862 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10863 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10864 \
10865 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10866 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10867 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10868 \
10869 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10870 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10871 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10872 IEM_MC_END(); \
10873 break; \
10874 \
10875 case IEMMODE_64BIT: \
10876 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10879 \
10880 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10881 IEMOP_HLP_DONE_DECODING(); \
10882 \
10883 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10884 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10885 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10886 \
10887 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10888 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10889 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10890 \
10891 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10892 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10894 IEM_MC_END(); \
10895 break; \
10896 \
10897 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10898 } \
10899 } \
10900 } \
10901 (void)0
10902
10903/* Read-only version (bt) */
10904#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10905 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10906 \
10907 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10908 { \
10909 /* register destination. */ \
10910 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10911 \
10912 switch (pVCpu->iem.s.enmEffOpSize) \
10913 { \
10914 case IEMMODE_16BIT: \
10915 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10917 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10918 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10919 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10920 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10921 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10922 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10923 \
10924 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10925 IEM_MC_END(); \
10926 break; \
10927 \
10928 case IEMMODE_32BIT: \
10929 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10931 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10932 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10933 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10934 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10935 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10936 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10937 \
10938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10939 IEM_MC_END(); \
10940 break; \
10941 \
10942 case IEMMODE_64BIT: \
10943 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10945 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10946 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10947 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10948 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10949 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10950 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10951 \
10952 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10953 IEM_MC_END(); \
10954 break; \
10955 \
10956 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10957 } \
10958 } \
10959 else \
10960 { \
10961 /* memory destination. */ \
10962 /** @todo test negative bit offsets! */ \
10963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10964 { \
10965 switch (pVCpu->iem.s.enmEffOpSize) \
10966 { \
10967 case IEMMODE_16BIT: \
10968 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10971 \
10972 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10973 IEMOP_HLP_DONE_DECODING(); \
10974 \
10975 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10976 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10977 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10978 \
10979 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10980 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10981 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10982 \
10983 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10984 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10985 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10986 IEM_MC_END(); \
10987 break; \
10988 \
10989 case IEMMODE_32BIT: \
10990 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10993 \
10994 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10995 IEMOP_HLP_DONE_DECODING(); \
10996 \
10997 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10998 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10999 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11000 \
11001 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11002 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
11003 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11004 \
11005 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11006 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11007 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11008 IEM_MC_END(); \
11009 break; \
11010 \
11011 case IEMMODE_64BIT: \
11012 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11015 \
11016 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11017 IEMOP_HLP_DONE_DECODING(); \
11018 \
11019 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11020 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
11021 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11022 \
11023 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11024 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
11025 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11026 \
11027 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11028 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11029 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11030 IEM_MC_END(); \
11031 break; \
11032 \
11033 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11034 } \
11035 } \
11036 else \
11037 { \
11038 IEMOP_HLP_DONE_DECODING(); \
11039 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11040 } \
11041 } \
11042 (void)0
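

/*
 * Illustrative sketch (ad hoc names): the bit test semantics behind the
 * Ev,Ib bodies above.  The immediate bit offset is taken modulo the
 * operand width, which is what the bImm & 0x0f/0x1f/0x3f masking does.
 */
#if 0
static void bitTestSketchU32(uint32_t *puDst, uint8_t bImm, uint32_t *pfEFlags, char chOp)
{
    uint32_t const fBit = UINT32_C(1) << (bImm & 0x1f);
    if (*puDst & fBit)
        *pfEFlags |= X86_EFL_CF;            /* CF = the selected bit, for all four variants. */
    else
        *pfEFlags &= ~X86_EFL_CF;
    switch (chOp)
    {
        case 's': *puDst |= fBit;  break;   /* bts: set the bit. */
        case 'r': *puDst &= ~fBit; break;   /* btr: clear the bit. */
        case 'c': *puDst ^= fBit;  break;   /* btc: toggle the bit. */
        default:                   break;   /* bt: read-only. */
    }
}
#endif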
11043
11044
11045/**
11046 * @opmaps grp8
11047 * @opcode /4
11048 * @oppfx n/a
11049 * @opflclass bitmap
11050 */
11051FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11052{
11053 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11054 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11055}
11056
11057
11058/**
11059 * @opmaps grp8
11060 * @opcode /5
11061 * @oppfx n/a
11062 * @opflclass bitmap
11063 */
11064FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11065{
11066 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11067 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11068 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11069}
11070
11071
11072/**
11073 * @opmaps grp8
11074 * @opcode /6
11075 * @oppfx n/a
11076 * @opflclass bitmap
11077 */
11078FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11079{
11080 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11081 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11082 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11083}
11084
11085
11086/**
11087 * @opmaps grp8
11088 * @opcode /7
11089 * @oppfx n/a
11090 * @opflclass bitmap
11091 */
11092FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11093{
11094 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11095 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11096 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11097}
11098
11099
11100/** Opcode 0x0f 0xba. */
11101FNIEMOP_DEF(iemOp_Grp8)
11102{
11103 IEMOP_HLP_MIN_386();
11104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11105 switch (IEM_GET_MODRM_REG_8(bRm))
11106 {
11107 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11108 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11109 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11110 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11111
11112 case 0: case 1: case 2: case 3:
11113 /* Both AMD and Intel want full modr/m decoding and imm8. */
11114 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11115
11116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11117 }
11118}
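

/*
 * Note (sketch): the dispatch above keys off the reg field of the ModR/M
 * byte; IEM_GET_MODRM_REG_8(bRm) amounts to (bRm >> 3) & 7, which is the
 * /4../7 notation used in the @opcode tags of the workers above.
 */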
11119
11120
11121/**
11122 * @opcode 0xbb
11123 * @oppfx n/a
11124 * @opflclass bitmap
11125 */
11126FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11127{
11128 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11129 IEMOP_HLP_MIN_386();
11130 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11131 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11132}
11133
11134
11135/**
11136 * Body for BSF and BSR instructions.
11137 *
11138 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11139 * the destination register, which means that for 32-bit operations the high
11140 * bits must be left alone.
11141 *
11142 * @param pImpl Pointer to the instruction implementation (assembly).
11143 */
11144#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11146 \
11147 /* \
11148 * If rm is denoting a register, no more instruction bytes. \
11149 */ \
11150 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11151 { \
11152 switch (pVCpu->iem.s.enmEffOpSize) \
11153 { \
11154 case IEMMODE_16BIT: \
11155 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11157 \
11158 IEM_MC_ARG(uint16_t, u16Src, 2); \
11159 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11160 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11161 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11162 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11163 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11164 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11165 \
11166 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11167 IEM_MC_END(); \
11168 break; \
11169 \
11170 case IEMMODE_32BIT: \
11171 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11173 \
11174 IEM_MC_ARG(uint32_t, u32Src, 2); \
11175 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11176 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11177 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11178 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11179 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11180 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11181 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11182 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11183 } IEM_MC_ENDIF(); \
11184 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11185 IEM_MC_END(); \
11186 break; \
11187 \
11188 case IEMMODE_64BIT: \
11189 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11191 \
11192 IEM_MC_ARG(uint64_t, u64Src, 2); \
11193 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11194 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11195 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11196 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11197 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11198 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11199 \
11200 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11201 IEM_MC_END(); \
11202 break; \
11203 \
11204 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11205 } \
11206 } \
11207 else \
11208 { \
11209 /* \
11210 * We're accessing memory. \
11211 */ \
11212 switch (pVCpu->iem.s.enmEffOpSize) \
11213 { \
11214 case IEMMODE_16BIT: \
11215 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11219 \
11220 IEM_MC_ARG(uint16_t, u16Src, 2); \
11221 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11222 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11223 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11224 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11225 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11226 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11227 \
11228 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11229 IEM_MC_END(); \
11230 break; \
11231 \
11232 case IEMMODE_32BIT: \
11233 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11237 \
11238 IEM_MC_ARG(uint32_t, u32Src, 2); \
11239 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11240 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11241 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11242 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11243 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11244 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11245 \
11246 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11247 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11248 } IEM_MC_ENDIF(); \
11249 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11250 IEM_MC_END(); \
11251 break; \
11252 \
11253 case IEMMODE_64BIT: \
11254 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11258 \
11259 IEM_MC_ARG(uint64_t, u64Src, 2); \
11260 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11261 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11262 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11263 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11264 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11265 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11266 \
11267 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11268 IEM_MC_END(); \
11269 break; \
11270 \
11271 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11272 } \
11273 } (void)0
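

/*
 * Illustrative sketch (ad hoc names): the bit scan semantics fed to the
 * workers above.  On a zero source ZF is set and the destination is left
 * unmodified (AMD documents this; Intel documents it as undefined), which
 * is why the 32-bit paths above only clear the high half when ZF is 0.
 */
#if 0
static bool bsfSketchU32(uint32_t *puDst, uint32_t uSrc)
{
    if (!uSrc)
        return false;       /* ZF=1, *puDst is not written. */
    unsigned iBit = 0;
    while (!(uSrc & 1))     /* Lowest set bit; bsr scans from the top instead. */
    {
        uSrc >>= 1;
        iBit++;
    }
    *puDst = iBit;
    return true;            /* ZF=0. */
}
#endif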
11274
11275
11276/**
11277 * @opcode 0xbc
11278 * @oppfx !0xf3
11279 * @opfltest cf,pf,af,sf,of
11280 * @opflmodify cf,pf,af,zf,sf,of
11281 * @opflundef cf,pf,af,sf,of
11282 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11283 * document them as inputs. Sigh.
11284 */
11285FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11286{
11287 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11288 IEMOP_HLP_MIN_386();
11289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11290 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11291 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11292}
11293
11294
11295/**
11296 * @opcode 0xbc
11297 * @oppfx 0xf3
11298 * @opfltest pf,af,sf,of
11299 * @opflmodify cf,pf,af,zf,sf,of
11300 * @opflundef pf,af,sf,of
11301 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11302 * document them as inputs. Sigh.
11303 */
11304FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11305{
11306 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11307 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11308 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11309
11310#ifndef TST_IEM_CHECK_MC
11311 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11312 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11313 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11314 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11315 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11316 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11317 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11318 {
11319 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11320 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11321 };
11322#endif
11323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11324 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11325 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11327 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11328}
11329
11330
11331/**
11332 * @opcode 0xbd
11333 * @oppfx !0xf3
11334 * @opfltest cf,pf,af,sf,of
11335 * @opflmodify cf,pf,af,zf,sf,of
11336 * @opflundef cf,pf,af,sf,of
11337 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11338 * document them as inputs. Sigh.
11339 */
11340FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11341{
11342 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11343 IEMOP_HLP_MIN_386();
11344 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11345 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11346 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11347}
11348
11349
11350/**
11351 * @opcode 0xbd
11352 * @oppfx 0xf3
11353 * @opfltest pf,af,sf,of
11354 * @opflmodify cf,pf,af,zf,sf,of
11355 * @opflundef pf,af,sf,of
11356 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11357 * document them as inputs. Sigh.
11358 */
11359FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11360{
11361 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11362 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11363 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11364
11365#ifndef TST_IEM_CHECK_MC
11366 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11367 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11368 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11369 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11370 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11371 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11372 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11373 {
11374 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11375 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11376 };
11377#endif
11378 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11379 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11380 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11382 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11383}
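

/*
 * Note (sketch, pseudo-C): unlike bsf/bsr, tzcnt/lzcnt are defined for a
 * zero source (the result is then the operand width) and use the flags
 * differently:
 *
 *      tzcnt: uDst = uSrc ? ctz(uSrc) : cBits;  CF = (uSrc == 0);  ZF = (uDst == 0);
 *      lzcnt: uDst = uSrc ? clz(uSrc) : cBits;  CF = (uSrc == 0);  ZF = (uDst == 0);
 *
 * This is also why both decoders above forward to bsf/bsr when the guest
 * lacks BMI1/ABM: the 0xf3 prefix is simply ignored in that case.
 */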
11384
11385
11386
11387/** Opcode 0x0f 0xbe. */
11388FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11389{
11390 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11391 IEMOP_HLP_MIN_386();
11392
11393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11394
11395 /*
11396 * If rm is denoting a register, no more instruction bytes.
11397 */
11398 if (IEM_IS_MODRM_REG_MODE(bRm))
11399 {
11400 switch (pVCpu->iem.s.enmEffOpSize)
11401 {
11402 case IEMMODE_16BIT:
11403 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11405 IEM_MC_LOCAL(uint16_t, u16Value);
11406 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11407 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11408 IEM_MC_ADVANCE_RIP_AND_FINISH();
11409 IEM_MC_END();
11410 break;
11411
11412 case IEMMODE_32BIT:
11413 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11415 IEM_MC_LOCAL(uint32_t, u32Value);
11416 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11417 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11418 IEM_MC_ADVANCE_RIP_AND_FINISH();
11419 IEM_MC_END();
11420 break;
11421
11422 case IEMMODE_64BIT:
11423 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11425 IEM_MC_LOCAL(uint64_t, u64Value);
11426 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11427 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11428 IEM_MC_ADVANCE_RIP_AND_FINISH();
11429 IEM_MC_END();
11430 break;
11431
11432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11433 }
11434 }
11435 else
11436 {
11437 /*
11438 * We're loading a register from memory.
11439 */
11440 switch (pVCpu->iem.s.enmEffOpSize)
11441 {
11442 case IEMMODE_16BIT:
11443 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11444 IEM_MC_LOCAL(uint16_t, u16Value);
11445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11448 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11449 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11450 IEM_MC_ADVANCE_RIP_AND_FINISH();
11451 IEM_MC_END();
11452 break;
11453
11454 case IEMMODE_32BIT:
11455 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11456 IEM_MC_LOCAL(uint32_t, u32Value);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11460 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11461 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11462 IEM_MC_ADVANCE_RIP_AND_FINISH();
11463 IEM_MC_END();
11464 break;
11465
11466 case IEMMODE_64BIT:
11467 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11468 IEM_MC_LOCAL(uint64_t, u64Value);
11469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11472 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11473 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11474 IEM_MC_ADVANCE_RIP_AND_FINISH();
11475 IEM_MC_END();
11476 break;
11477
11478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11479 }
11480 }
11481}
11482
11483
11484/** Opcode 0x0f 0xbf. */
11485FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11486{
11487 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11488 IEMOP_HLP_MIN_386();
11489
11490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11491
11492 /** @todo Not entirely sure how the operand size prefix is handled here,
11493 * assuming that it will be ignored. Would be nice to have a few
11494 * tests for this. */
11495 /*
11496 * If rm is denoting a register, no more instruction bytes.
11497 */
11498 if (IEM_IS_MODRM_REG_MODE(bRm))
11499 {
11500 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11501 {
11502 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11504 IEM_MC_LOCAL(uint32_t, u32Value);
11505 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11506 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11507 IEM_MC_ADVANCE_RIP_AND_FINISH();
11508 IEM_MC_END();
11509 }
11510 else
11511 {
11512 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11514 IEM_MC_LOCAL(uint64_t, u64Value);
11515 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11516 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11517 IEM_MC_ADVANCE_RIP_AND_FINISH();
11518 IEM_MC_END();
11519 }
11520 }
11521 else
11522 {
11523 /*
11524 * We're loading a register from memory.
11525 */
11526 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11527 {
11528 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11529 IEM_MC_LOCAL(uint32_t, u32Value);
11530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11533 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11534 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11535 IEM_MC_ADVANCE_RIP_AND_FINISH();
11536 IEM_MC_END();
11537 }
11538 else
11539 {
11540 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11541 IEM_MC_LOCAL(uint64_t, u64Value);
11542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11545 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11546 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11547 IEM_MC_ADVANCE_RIP_AND_FINISH();
11548 IEM_MC_END();
11549 }
11550 }
11551}
11552
11553
11554/**
11555 * @opcode 0xc0
11556 * @opflclass arithmetic
11557 */
11558FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11559{
11560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11561 IEMOP_HLP_MIN_486();
11562 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11563
11564 /*
11565 * If rm is denoting a register, no more instruction bytes.
11566 */
11567 if (IEM_IS_MODRM_REG_MODE(bRm))
11568 {
11569 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11571 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11572 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11573 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11574
11575 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11576 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11577 IEM_MC_REF_EFLAGS(pEFlags);
11578 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11579
11580 IEM_MC_ADVANCE_RIP_AND_FINISH();
11581 IEM_MC_END();
11582 }
11583 else
11584 {
11585 /*
11586 * We're accessing memory.
11587 */
11588#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11589 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11592 IEMOP_HLP_DONE_DECODING(); \
11593 \
11594 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11595 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11596 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11597 \
11598 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11599 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11600 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11601 \
11602 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11603 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11604 \
11605 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11606 IEM_MC_COMMIT_EFLAGS(EFlags); \
11607 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11608 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11609 IEM_MC_END()
11610 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11611 {
11612 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11613 }
11614 else
11615 {
11616 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11617 }
11618 }
11619}
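

/*
 * Illustrative sketch (ad hoc names): the exchange-and-add performed by
 * the xadd workers; the arithmetic flags are set as for the ADD and are
 * omitted here.
 */
#if 0
static void xaddSketchU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uSum = (uint8_t)(*puDst + *puReg);    /* TEMP = DST + SRC */
    *puReg = *puDst;                                    /* SRC  = DST  */
    *puDst = uSum;                                      /* DST  = TEMP */
}
#endif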
11620
11621
11622/**
11623 * @opcode 0xc1
11624 * @opflclass arithmetic
11625 */
11626FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11627{
11628 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11629 IEMOP_HLP_MIN_486();
11630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11631
11632 /*
11633 * If rm is denoting a register, no more instruction bytes.
11634 */
11635 if (IEM_IS_MODRM_REG_MODE(bRm))
11636 {
11637 switch (pVCpu->iem.s.enmEffOpSize)
11638 {
11639 case IEMMODE_16BIT:
11640 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11642 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11643 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11644 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11645
11646 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11647 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11648 IEM_MC_REF_EFLAGS(pEFlags);
11649 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11650
11651 IEM_MC_ADVANCE_RIP_AND_FINISH();
11652 IEM_MC_END();
11653 break;
11654
11655 case IEMMODE_32BIT:
11656 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11658 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11659 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11660 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11661
11662 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11663 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11664 IEM_MC_REF_EFLAGS(pEFlags);
11665 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11666
11667 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11668 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11669 IEM_MC_ADVANCE_RIP_AND_FINISH();
11670 IEM_MC_END();
11671 break;
11672
11673 case IEMMODE_64BIT:
11674 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11676 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11677 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11678 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11679
11680 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11681 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11682 IEM_MC_REF_EFLAGS(pEFlags);
11683 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11684
11685 IEM_MC_ADVANCE_RIP_AND_FINISH();
11686 IEM_MC_END();
11687 break;
11688
11689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11690 }
11691 }
11692 else
11693 {
11694 /*
11695 * We're accessing memory.
11696 */
11697#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11698 do { \
11699 switch (pVCpu->iem.s.enmEffOpSize) \
11700 { \
11701 case IEMMODE_16BIT: \
11702 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11705 IEMOP_HLP_DONE_DECODING(); \
11706 \
11707 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11708 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11709 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11710 \
11711 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11712 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11713 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11714 \
11715 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11716 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11717 \
11718 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11719 IEM_MC_COMMIT_EFLAGS(EFlags); \
11720 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11721 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11722 IEM_MC_END(); \
11723 break; \
11724 \
11725 case IEMMODE_32BIT: \
11726 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11729 IEMOP_HLP_DONE_DECODING(); \
11730 \
11731 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11732 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11733 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11734 \
11735 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11736 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11737 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11738 \
11739 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11740 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11741 \
11742 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11743 IEM_MC_COMMIT_EFLAGS(EFlags); \
11744 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11745 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11746 IEM_MC_END(); \
11747 break; \
11748 \
11749 case IEMMODE_64BIT: \
11750 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11753 IEMOP_HLP_DONE_DECODING(); \
11754 \
11755 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11756 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11757 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11758 \
11759 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11760 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11761 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11762 \
11763 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11764 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11765 \
11766 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11767 IEM_MC_COMMIT_EFLAGS(EFlags); \
11768 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11769 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11770 IEM_MC_END(); \
11771 break; \
11772 \
11773 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11774 } \
11775 } while (0)
11776
11777 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11778 {
11779 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11780 }
11781 else
11782 {
11783 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11784 }
11785 }
11786}
11787
11788
11789/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11790FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11791{
11792 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11793
11794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11795 if (IEM_IS_MODRM_REG_MODE(bRm))
11796 {
11797 /*
11798 * XMM, XMM.
11799 */
11800 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11801 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11803 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11804 IEM_MC_LOCAL(X86XMMREG, Dst);
11805 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11806 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11807 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11808 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11809 IEM_MC_PREPARE_SSE_USAGE();
11810 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11811 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11812 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11813
11814 IEM_MC_ADVANCE_RIP_AND_FINISH();
11815 IEM_MC_END();
11816 }
11817 else
11818 {
11819 /*
11820 * XMM, [mem128].
11821 */
11822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11823 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11824 IEM_MC_LOCAL(X86XMMREG, Dst);
11825 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11826 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11828
11829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11830 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11831 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11834 IEM_MC_PREPARE_SSE_USAGE();
11835
11836 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11837 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11838 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11839
11840 IEM_MC_ADVANCE_RIP_AND_FINISH();
11841 IEM_MC_END();
11842 }
11843}
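

/*
 * Illustrative sketch (covers the classic, non-VEX imm8 range only): for
 * cmpps/cmppd/cmpss/cmpsd the immediate selects the compare predicate and
 * each lane becomes an all-ones or all-zeroes mask:
 *
 *      0=EQ  1=LT  2=LE  3=UNORD  4=NEQ  5=NLT  6=NLE  7=ORD
 */
#if 0
static uint32_t cmpltssSketch(float r32Dst, float r32Src) /* Predicate 1 (LT); NaN handling omitted. */
{
    return r32Dst < r32Src ? UINT32_MAX : UINT32_C(0);
}
#endif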
11844
11845
11846/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11847FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11848{
11849 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11850
11851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11852 if (IEM_IS_MODRM_REG_MODE(bRm))
11853 {
11854 /*
11855 * XMM, XMM.
11856 */
11857 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11858 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11860 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11861 IEM_MC_LOCAL(X86XMMREG, Dst);
11862 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11863 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11864 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11865 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11866 IEM_MC_PREPARE_SSE_USAGE();
11867 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11868 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11869 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11870
11871 IEM_MC_ADVANCE_RIP_AND_FINISH();
11872 IEM_MC_END();
11873 }
11874 else
11875 {
11876 /*
11877 * XMM, [mem128].
11878 */
11879 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11880 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11881 IEM_MC_LOCAL(X86XMMREG, Dst);
11882 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11883 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11885
11886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11887 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11888 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11890 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11891 IEM_MC_PREPARE_SSE_USAGE();
11892
11893 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11894 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11895 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11896
11897 IEM_MC_ADVANCE_RIP_AND_FINISH();
11898 IEM_MC_END();
11899 }
11900}
11901
11902
11903/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11904FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11905{
11906 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11907
11908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11909 if (IEM_IS_MODRM_REG_MODE(bRm))
11910 {
11911 /*
11912 * XMM32, XMM32.
11913 */
11914 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11915 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11917 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11918 IEM_MC_LOCAL(X86XMMREG, Dst);
11919 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11920 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11921 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11922 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11923 IEM_MC_PREPARE_SSE_USAGE();
11924 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11925 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11926 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11927
11928 IEM_MC_ADVANCE_RIP_AND_FINISH();
11929 IEM_MC_END();
11930 }
11931 else
11932 {
11933 /*
11934 * XMM32, [mem32].
11935 */
11936 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11937 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11938 IEM_MC_LOCAL(X86XMMREG, Dst);
11939 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11940 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11942
11943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11944 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11945 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11947 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11948 IEM_MC_PREPARE_SSE_USAGE();
11949
11950 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11951 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11952 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11953 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11954
11955 IEM_MC_ADVANCE_RIP_AND_FINISH();
11956 IEM_MC_END();
11957 }
11958}
11959
11960
11961/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11962FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11963{
11964 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11965
11966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11967 if (IEM_IS_MODRM_REG_MODE(bRm))
11968 {
11969 /*
11970 * XMM64, XMM64.
11971 */
11972 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11975 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11976 IEM_MC_LOCAL(X86XMMREG, Dst);
11977 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11978 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11979 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11980 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11981 IEM_MC_PREPARE_SSE_USAGE();
11982 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11983 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11984 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11985
11986 IEM_MC_ADVANCE_RIP_AND_FINISH();
11987 IEM_MC_END();
11988 }
11989 else
11990 {
11991 /*
11992 * XMM64, [mem64].
11993 */
11994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11995 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11996 IEM_MC_LOCAL(X86XMMREG, Dst);
11997 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11998 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12000
12001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12002 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12003 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12006 IEM_MC_PREPARE_SSE_USAGE();
12007
12008 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12009 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12010 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12011 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12012
12013 IEM_MC_ADVANCE_RIP_AND_FINISH();
12014 IEM_MC_END();
12015 }
12016}
12017
12018
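/* movnti performs an ordinary GPR -> memory store carrying a non-temporal
   cache hint; the hint does not change the stored value, so an interpreter
   can treat it as a plain store.  Guest-visible effect, roughly:

       *(uint32_t *)pvDst = u32Value;   // or a uint64_t store with REX.W
*/
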
12019/** Opcode 0x0f 0xc3. */
12020FNIEMOP_DEF(iemOp_movnti_My_Gy)
12021{
12022 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12023
12024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12025
12026 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12027 if (IEM_IS_MODRM_MEM_MODE(bRm))
12028 {
12029 switch (pVCpu->iem.s.enmEffOpSize)
12030 {
12031 case IEMMODE_32BIT:
12032 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
12033 IEM_MC_LOCAL(uint32_t, u32Value);
12034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12035
12036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12038
12039 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12040 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12041 IEM_MC_ADVANCE_RIP_AND_FINISH();
12042 IEM_MC_END();
12043 break;
12044
12045 case IEMMODE_64BIT:
12046 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12047 IEM_MC_LOCAL(uint64_t, u64Value);
12048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12049
12050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12052
12053 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12054 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12055 IEM_MC_ADVANCE_RIP_AND_FINISH();
12056 IEM_MC_END();
12057 break;
12058
12059 case IEMMODE_16BIT:
12060 /** @todo check this form. */
12061 IEMOP_RAISE_INVALID_OPCODE_RET();
12062
12063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12064 }
12065 }
12066 else
12067 IEMOP_RAISE_INVALID_OPCODE_RET();
12068}
12069
12070
12071/* Opcode 0x66 0x0f 0xc3 - invalid */
12072/* Opcode 0xf3 0x0f 0xc3 - invalid */
12073/* Opcode 0xf2 0x0f 0xc3 - invalid */
12074
12075
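/* pinsrw replaces one 16-bit lane of the destination with a GPR/memory word;
   the MMX form keeps two immediate selector bits (4 lanes), the SSE form
   three (8 lanes).  Lane-update sketch (illustrative helper, little-endian
   lane numbering):

       void pinsrwRef(uint16_t *pauDst, unsigned cLanes, uint16_t uValue, uint8_t bImm)
       {
           pauDst[bImm & (cLanes - 1)] = uValue;   // cLanes: 4 for MMX, 8 for XMM
       }
*/
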
12076/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12077FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12078{
12079 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12081 if (IEM_IS_MODRM_REG_MODE(bRm))
12082 {
12083 /*
12084 * Register, register.
12085 */
12086 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12087 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12088 IEM_MC_LOCAL(uint16_t, uValue);
12089
12090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12091 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12092 IEM_MC_PREPARE_FPU_USAGE();
12093 IEM_MC_FPU_TO_MMX_MODE();
12094
12095 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12096 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG_8(bRm), bImm & 3, uValue);
12097
12098 IEM_MC_ADVANCE_RIP_AND_FINISH();
12099 IEM_MC_END();
12100 }
12101 else
12102 {
12103 /*
12104 * Register, memory.
12105 */
12106 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12108 IEM_MC_LOCAL(uint16_t, uValue);
12109
12110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12111 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12113 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12114 IEM_MC_PREPARE_FPU_USAGE();
12115
12116 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12117 IEM_MC_FPU_TO_MMX_MODE();
12118 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG_8(bRm), bImm & 3, uValue);
12119
12120 IEM_MC_ADVANCE_RIP_AND_FINISH();
12121 IEM_MC_END();
12122 }
12123}
12124
12125
12126/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12127FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12128{
12129 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12130 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12131 if (IEM_IS_MODRM_REG_MODE(bRm))
12132 {
12133 /*
12134 * Register, register.
12135 */
12136 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12137 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12139
12140 IEM_MC_LOCAL(uint16_t, uValue);
12141 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12142 IEM_MC_PREPARE_SSE_USAGE();
12143
12144 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12145 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12146 IEM_MC_ADVANCE_RIP_AND_FINISH();
12147 IEM_MC_END();
12148 }
12149 else
12150 {
12151 /*
12152 * Register, memory.
12153 */
12154 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12156 IEM_MC_LOCAL(uint16_t, uValue);
12157
12158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12159 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12161 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12162 IEM_MC_PREPARE_SSE_USAGE();
12163
12164 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12165 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12166 IEM_MC_ADVANCE_RIP_AND_FINISH();
12167 IEM_MC_END();
12168 }
12169}
12170
12171
12172/* Opcode 0xf3 0x0f 0xc4 - invalid */
12173/* Opcode 0xf2 0x0f 0xc4 - invalid */
12174
12175
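/* pextrw is the inverse of pinsrw: it reads the selected 16-bit lane and
   zero-extends it into a 32-bit GPR.  Sketch (illustrative helper):

       uint32_t pextrwRef(uint16_t const *pauSrc, unsigned cLanes, uint8_t bImm)
       {
           return pauSrc[bImm & (cLanes - 1)];   // implicitly zero-extended
       }
*/
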
12176/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12177FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12178{
12179 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12181 if (IEM_IS_MODRM_REG_MODE(bRm))
12182 {
12183 /*
12184 * Greg32, MMX, imm8.
12185 */
12186 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12187 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12189 IEM_MC_LOCAL(uint16_t, uValue);
12190 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12191 IEM_MC_PREPARE_FPU_USAGE();
12192 IEM_MC_FPU_TO_MMX_MODE();
12193 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12194 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12195 IEM_MC_ADVANCE_RIP_AND_FINISH();
12196 IEM_MC_END();
12197 }
12198 /* No memory operand. */
12199 else
12200 IEMOP_RAISE_INVALID_OPCODE_RET();
12201}
12202
12203
12204/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12205FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12206{
12207 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12209 if (IEM_IS_MODRM_REG_MODE(bRm))
12210 {
12211 /*
12212 * Greg32, XMM, imm8.
12213 */
12214 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12215 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12217 IEM_MC_LOCAL(uint16_t, uValue);
12218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12219 IEM_MC_PREPARE_SSE_USAGE();
12220 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12221 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12222 IEM_MC_ADVANCE_RIP_AND_FINISH();
12223 IEM_MC_END();
12224 }
12225 /* No memory operand. */
12226 else
12227 IEMOP_RAISE_INVALID_OPCODE_RET();
12228}
12229
12230
12231/* Opcode 0xf3 0x0f 0xc5 - invalid */
12232/* Opcode 0xf2 0x0f 0xc5 - invalid */
12233
12234
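/* shufps builds the two low result dwords from the destination and the two
   high ones from the source, each picked by a 2-bit field of the imm8.
   Reference sketch (illustrative helper; snapshots both operands first so it
   stays correct when destination and source are the same register):

       void shufpsRef(uint32_t *pauDst, uint32_t const *pauSrc, uint8_t bImm)
       {
           uint32_t const au32Dst[4] = { pauDst[0], pauDst[1], pauDst[2], pauDst[3] };
           uint32_t const au32Src[4] = { pauSrc[0], pauSrc[1], pauSrc[2], pauSrc[3] };
           pauDst[0] = au32Dst[ bImm       & 3];
           pauDst[1] = au32Dst[(bImm >> 2) & 3];
           pauDst[2] = au32Src[(bImm >> 4) & 3];
           pauDst[3] = au32Src[(bImm >> 6) & 3];
       }
*/
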
12235/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12236FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12237{
12238 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12240 if (IEM_IS_MODRM_REG_MODE(bRm))
12241 {
12242 /*
12243 * XMM, XMM, imm8.
12244 */
12245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12246 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12248 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12249 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12250 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12251 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12252 IEM_MC_PREPARE_SSE_USAGE();
12253 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12254 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12255 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12256 IEM_MC_ADVANCE_RIP_AND_FINISH();
12257 IEM_MC_END();
12258 }
12259 else
12260 {
12261 /*
12262 * XMM, [mem128], imm8.
12263 */
12264 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12265 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12266 IEM_MC_LOCAL(RTUINT128U, uSrc);
12267 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12269
12270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12271 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12272 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12274 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12275 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12276
12277 IEM_MC_PREPARE_SSE_USAGE();
12278 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12279 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12280
12281 IEM_MC_ADVANCE_RIP_AND_FINISH();
12282 IEM_MC_END();
12283 }
12284}
12285
12286
12287/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12288FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12289{
12290 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12292 if (IEM_IS_MODRM_REG_MODE(bRm))
12293 {
12294 /*
12295 * XMM, XMM, imm8.
12296 */
12297 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12298 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12300 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12301 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12302 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12303 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12304 IEM_MC_PREPARE_SSE_USAGE();
12305 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12306 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12307 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12308 IEM_MC_ADVANCE_RIP_AND_FINISH();
12309 IEM_MC_END();
12310 }
12311 else
12312 {
12313 /*
12314 * XMM, [mem128], imm8.
12315 */
12316 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12317 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12318 IEM_MC_LOCAL(RTUINT128U, uSrc);
12319 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12321
12322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12323 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12324 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12327 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12328
12329 IEM_MC_PREPARE_SSE_USAGE();
12330 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12332
12333 IEM_MC_ADVANCE_RIP_AND_FINISH();
12334 IEM_MC_END();
12335 }
12336}
12337
12338
12339/* Opcode 0xf3 0x0f 0xc6 - invalid */
12340/* Opcode 0xf2 0x0f 0xc6 - invalid */
12341
12342
12343/**
12344 * @opmaps grp9
12345 * @opcode /1
12346 * @opcodesub !11 mr/reg rex.w=0
12347 * @oppfx n/a
12348 * @opflmodify zf
12349 */
12350FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12351{
12352 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
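/* Guest-visible semantics, as a sketch (illustrative helper, not the actual
   worker): compare EDX:EAX against the memory qword; on match write ECX:EBX
   and set ZF, otherwise load the qword into EDX:EAX and clear ZF.

       bool cmpxchg8bRef(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
       {
           if (*pu64Mem == *pu64EaxEdx)
           {
               *pu64Mem = u64EbxEcx;
               return true;    // ZF=1
           }
           *pu64EaxEdx = *pu64Mem;
           return false;       // ZF=0
       }
*/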
12353#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12354 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12357 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12358 \
12359 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12360 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12361 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12362 \
12363 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12364 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12365 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12366 \
12367 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12368 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12369 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12370 \
12371 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12372 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12373 \
12374 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12375 IEM_MC_COMMIT_EFLAGS(EFlags); \
12376 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12377 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12378 } IEM_MC_ENDIF(); \
12379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12380 \
12381 IEM_MC_END()
12382 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12383 {
12384 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12385 }
12386 else
12387 {
12388 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12389 }
12390}
12391
12392
12393/**
12394 * @opmaps grp9
12395 * @opcode /1
12396 * @opcodesub !11 mr/reg rex.w=1
12397 * @oppfx n/a
12398 * @opflmodify zf
12399 */
12400FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12401{
12402 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
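 /* Same compare-and-exchange pattern as cmpxchg8b, but on RDX:RAX / RCX:RBX
    and a 16-byte memory operand which must be 16-byte aligned; unaligned
    operands raise #GP(0) (see IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED below). */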
12403 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12404 {
12405 /*
12406 * This is hairy, very hairy macro fun. We're walking a fine line
12407 * here to make the code parsable by IEMAllInstPython.py and fit into
12408 * the patterns IEMAllThrdPython.py requires for the code morphing.
12409 */
12410#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12411 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12414 IEMOP_HLP_DONE_DECODING(); \
12415 \
12416 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12417 bUnmapInfoStmt; \
12418 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12419 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12420 \
12421 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12422 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12423 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12424 \
12425 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12426 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12427 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12428 \
12429 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12430
12431#define BODY_CMPXCHG16B_TAIL(a_Type) \
12432 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12433 IEM_MC_COMMIT_EFLAGS(EFlags); \
12434 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12435 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12436 } IEM_MC_ENDIF(); \
12437 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12438 IEM_MC_END()
12439
12440#ifdef RT_ARCH_AMD64
12441 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12442 {
12443 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12444 {
12445 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12447 BODY_CMPXCHG16B_TAIL(RW);
12448 }
12449 else
12450 {
12451 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12452 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12453 BODY_CMPXCHG16B_TAIL(ATOMIC);
12454 }
12455 }
12456 else
12457 { /* (see comments in #else case below) */
12458 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12459 {
12460 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12462 BODY_CMPXCHG16B_TAIL(RW);
12463 }
12464 else
12465 {
12466 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12467 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12468 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12469 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12470 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12471 pEFlags, bUnmapInfo);
12472 IEM_MC_END();
12473 }
12474 }
12475
12476#elif defined(RT_ARCH_ARM64)
12477 /** @todo may require fallback for unaligned accesses... */
12478 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12479 {
12480 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12481 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12482 BODY_CMPXCHG16B_TAIL(RW);
12483 }
12484 else
12485 {
12486 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12487 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12488 BODY_CMPXCHG16B_TAIL(ATOMIC);
12489 }
12490
12491#else
12492 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12493 accesses and not at all atomic, which works fine in a uni-CPU guest
12494 configuration (ignoring DMA). If guest SMP is active we have no choice
12495 but to use a rendezvous callback here. Sigh. */
12496 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12497 {
12498 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12499 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12500 BODY_CMPXCHG16B_TAIL(RW);
12501 }
12502 else
12503 {
12504 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12505 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12506 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12507 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12508 iemCImpl_cmpxchg16b_fallback_rendezvous,
12509 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12510 IEM_MC_END();
12511 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12512 }
12513#endif
12514
12515 #undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12516 }
12517 Log(("cmpxchg16b -> #UD\n"));
12518 IEMOP_RAISE_INVALID_OPCODE_RET();
12519}
12520
12521FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12522{
12523 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12524 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12525 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12526}
12527
12528
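/* For reference, rdrand/rdseed flag semantics: on success CF is set and the
   destination receives a random value; on underflow CF is cleared and the
   destination is zeroed.  OF, SF, ZF, AF and PF are cleared in both cases. */
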
12529/** Opcode 0x0f 0xc7 11/6. */
12530FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12531{
12532 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12533 IEMOP_RAISE_INVALID_OPCODE_RET();
12534
12535 if (IEM_IS_MODRM_REG_MODE(bRm))
12536 {
12537 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12539 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12540 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12541 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12542 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12543 iemCImpl_rdrand, iReg, enmEffOpSize);
12544 IEM_MC_END();
12545 }
12546 /* Register only. */
12547 else
12548 IEMOP_RAISE_INVALID_OPCODE_RET();
12549}
12550
12551/** Opcode 0x0f 0xc7 !11/6. */
12552#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12553FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12554{
12555 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12556 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12557 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12558 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12559 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12561 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12562 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12563 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12564 IEM_MC_END();
12565}
12566#else
12567FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12568#endif
12569
12570/** Opcode 0x66 0x0f 0xc7 !11/6. */
12571#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12572FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12573{
12574 IEMOP_MNEMONIC(vmclear, "vmclear");
12575 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12576 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12578 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12580 IEMOP_HLP_DONE_DECODING();
12581 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12582 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12583 IEM_MC_END();
12584}
12585#else
12586FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12587#endif
12588
12589/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12590#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12591FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12592{
12593 IEMOP_MNEMONIC(vmxon, "vmxon");
12594 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12595 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12596 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12598 IEMOP_HLP_DONE_DECODING();
12599 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12600 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12601 IEM_MC_END();
12602}
12603#else
12604FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12605#endif
12606
12607/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12608#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12609FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12610{
12611 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12612 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12613 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12614 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12615 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12617 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12618 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12619 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12620 IEM_MC_END();
12621}
12622#else
12623FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12624#endif
12625
12626/** Opcode 0x0f 0xc7 11/7. */
12627FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12628{
12629 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12630 IEMOP_RAISE_INVALID_OPCODE_RET();
12631
12632 if (IEM_IS_MODRM_REG_MODE(bRm))
12633 {
12634 /* register destination. */
12635 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12637 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12638 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12639 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12640 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12641 iemCImpl_rdseed, iReg, enmEffOpSize);
12642 IEM_MC_END();
12643 }
12644 /* Register only. */
12645 else
12646 IEMOP_RAISE_INVALID_OPCODE_RET();
12647}
12648
12649/**
12650 * Group 9 jump table for register variant.
12651 */
12652IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12653{ /* pfx: none, 066h, 0f3h, 0f2h */
12654 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12655 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12656 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12657 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12658 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12659 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12660 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12661 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12662};
12663AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12664
12665
12666/**
12667 * Group 9 jump table for memory variant.
12668 */
12669IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12670{ /* pfx: none, 066h, 0f3h, 0f2h */
12671 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12672 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12673 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12674 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12675 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12676 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12677 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12678 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12679};
12680AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12681
12682
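/* The Grp9 dispatcher below indexes these tables as ModR/M.reg times four
   plus the mandatory-prefix index (none, 0x66, 0xf3, 0xf2), which is what
   the 8*4 AssertCompile checks guard. */
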
12683/** Opcode 0x0f 0xc7. */
12684FNIEMOP_DEF(iemOp_Grp9)
12685{
12686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12687 if (IEM_IS_MODRM_REG_MODE(bRm))
12688 /* register, register */
12689 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12690 + pVCpu->iem.s.idxPrefix], bRm);
12691 /* memory, register */
12692 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12693 + pVCpu->iem.s.idxPrefix], bRm);
12694}
12695
12696
12697/**
12698 * Common 'bswap register' helper.
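 *
 * Reverses the byte order of the general register selected by iReg.  For
 * reference: the 16-bit encoding is architecturally undefined; real CPUs
 * are commonly observed to clear the low word.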
12699 */
12700FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12701{
12702 switch (pVCpu->iem.s.enmEffOpSize)
12703 {
12704 case IEMMODE_16BIT:
12705 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12707 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12708 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12709 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12710 IEM_MC_ADVANCE_RIP_AND_FINISH();
12711 IEM_MC_END();
12712 break;
12713
12714 case IEMMODE_32BIT:
12715 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12717 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12718 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12719 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12720 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12721 IEM_MC_ADVANCE_RIP_AND_FINISH();
12722 IEM_MC_END();
12723 break;
12724
12725 case IEMMODE_64BIT:
12726 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12728 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12729 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12730 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12731 IEM_MC_ADVANCE_RIP_AND_FINISH();
12732 IEM_MC_END();
12733 break;
12734
12735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12736 }
12737}
12738
12739
12740/** Opcode 0x0f 0xc8. */
12741FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12742{
12743 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12744 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12745 prefix. It appears REX.B is actually the correct prefix. For a parallel
12746 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12747 IEMOP_HLP_MIN_486();
12748 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12749}
12750
12751
12752/** Opcode 0x0f 0xc9. */
12753FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12754{
12755 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12756 IEMOP_HLP_MIN_486();
12757 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12758}
12759
12760
12761/** Opcode 0x0f 0xca. */
12762FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12763{
12764 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12765 IEMOP_HLP_MIN_486();
12766 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12767}
12768
12769
12770/** Opcode 0x0f 0xcb. */
12771FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12772{
12773 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12774 IEMOP_HLP_MIN_486();
12775 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12776}
12777
12778
12779/** Opcode 0x0f 0xcc. */
12780FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12781{
12782 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12783 IEMOP_HLP_MIN_486();
12784 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12785}
12786
12787
12788/** Opcode 0x0f 0xcd. */
12789FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12790{
12791 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12792 IEMOP_HLP_MIN_486();
12793 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12794}
12795
12796
12797/** Opcode 0x0f 0xce. */
12798FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12799{
12800 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12801 IEMOP_HLP_MIN_486();
12802 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12803}
12804
12805
12806/** Opcode 0x0f 0xcf. */
12807FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12808{
12809 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12810 IEMOP_HLP_MIN_486();
12811 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12812}
12813
12814
12815/* Opcode 0x0f 0xd0 - invalid */
12816
12817
12818/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12819FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12820{
12821 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12822 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12823}
12824
12825
12826/* Opcode 0xf3 0x0f 0xd0 - invalid */
12827
12828
12829/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12830FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12831{
12832 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12833 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12834}
12835
12836
12837
12838/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12839FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12840{
12841 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12842 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12843}
12844
12845/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12846FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12847{
12848 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12849 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12850}
12851
12852/* Opcode 0xf3 0x0f 0xd1 - invalid */
12853/* Opcode 0xf2 0x0f 0xd1 - invalid */
12854
12855/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12856FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12857{
12858 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12859 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12860}
12861
12862
12863/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12864FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12865{
12866 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12867 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12868}
12869
12870
12871/* Opcode 0xf3 0x0f 0xd2 - invalid */
12872/* Opcode 0xf2 0x0f 0xd2 - invalid */
12873
12874/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12875FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12876{
12877 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12878 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12879}
12880
12881
12882/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12883FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12884{
12885 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12886 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12887}
12888
12889
12890/* Opcode 0xf3 0x0f 0xd3 - invalid */
12891/* Opcode 0xf2 0x0f 0xd3 - invalid */
12892
12893
12894/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12895FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12896{
12897 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12898 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12899}
12900
12901
12902/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12903FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12904{
12905 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12906 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12907}
12908
12909
12910/* Opcode 0xf3 0x0f 0xd4 - invalid */
12911/* Opcode 0xf2 0x0f 0xd4 - invalid */
12912
12913/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12914FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12915{
12916 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12917 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12918}
12919
12920/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12921FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12922{
12923 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12924 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12925}
12926
12927
12928/* Opcode 0xf3 0x0f 0xd5 - invalid */
12929/* Opcode 0xf2 0x0f 0xd5 - invalid */
12930
12931/* Opcode 0x0f 0xd6 - invalid */
12932
12933/**
12934 * @opcode 0xd6
12935 * @oppfx 0x66
12936 * @opcpuid sse2
12937 * @opgroup og_sse2_pcksclr_datamove
12938 * @opxcpttype none
12939 * @optest op1=-1 op2=2 -> op1=2
12940 * @optest op1=0 op2=-42 -> op1=-42
12941 */
12942FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12943{
12944 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12946 if (IEM_IS_MODRM_REG_MODE(bRm))
12947 {
12948 /*
12949 * Register, register.
12950 */
12951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12953 IEM_MC_LOCAL(uint64_t, uSrc);
12954
12955 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12957
12958 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12959 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12960
12961 IEM_MC_ADVANCE_RIP_AND_FINISH();
12962 IEM_MC_END();
12963 }
12964 else
12965 {
12966 /*
12967 * Memory, register.
12968 */
12969 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12970 IEM_MC_LOCAL(uint64_t, uSrc);
12971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12972
12973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12975 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12976 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12977
12978 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12979 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12980
12981 IEM_MC_ADVANCE_RIP_AND_FINISH();
12982 IEM_MC_END();
12983 }
12984}
12985
12986
12987/**
12988 * @opcode 0xd6
12989 * @opcodesub 11 mr/reg
12990 * @oppfx f3
12991 * @opcpuid sse2
12992 * @opgroup og_sse2_simdint_datamove
12993 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12994 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12995 */
12996FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12997{
12998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12999 if (IEM_IS_MODRM_REG_MODE(bRm))
13000 {
13001 /*
13002 * Register, register.
13003 */
13004 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13005 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13007 IEM_MC_LOCAL(uint64_t, uSrc);
13008
13009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13010 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13011 IEM_MC_FPU_TO_MMX_MODE();
13012
13013 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13014 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13015
13016 IEM_MC_ADVANCE_RIP_AND_FINISH();
13017 IEM_MC_END();
13018 }
13019
13020 /**
13021 * @opdone
13022 * @opmnemonic udf30fd6mem
13023 * @opcode 0xd6
13024 * @opcodesub !11 mr/reg
13025 * @oppfx f3
13026 * @opunused intel-modrm
13027 * @opcpuid sse
13028 * @optest ->
13029 */
13030 else
13031 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13032}
13033
13034
13035/**
13036 * @opcode 0xd6
13037 * @opcodesub 11 mr/reg
13038 * @oppfx f2
13039 * @opcpuid sse2
13040 * @opgroup og_sse2_simdint_datamove
13041 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13042 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13043 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13044 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13045 * @optest op1=-42 op2=0xfedcba9876543210
13046 * -> op1=0xfedcba9876543210 ftw=0xff
13047 */
13048FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13049{
13050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13051 if (IEM_IS_MODRM_REG_MODE(bRm))
13052 {
13053 /*
13054 * Register, register.
13055 */
13056 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13057 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13059 IEM_MC_LOCAL(uint64_t, uSrc);
13060
13061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13062 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13063 IEM_MC_FPU_TO_MMX_MODE();
13064
13065 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13066 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13067
13068 IEM_MC_ADVANCE_RIP_AND_FINISH();
13069 IEM_MC_END();
13070 }
13071
13072 /**
13073 * @opdone
13074 * @opmnemonic udf20fd6mem
13075 * @opcode 0xd6
13076 * @opcodesub !11 mr/reg
13077 * @oppfx f2
13078 * @opunused intel-modrm
13079 * @opcpuid sse
13080 * @optest ->
13081 */
13082 else
13083 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13084}
13085
13086
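/* pmovmskb gathers the most significant bit of every packed byte into the
   low bits of the destination GPR.  Sketch for the 8-byte MMX form (the SSE
   form does the same over 16 bytes; illustrative helper):

       uint32_t pmovmskbRef(uint64_t uSrc)
       {
           uint32_t fMask = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
               fMask |= ((uint32_t)(uSrc >> (iByte * 8 + 7)) & 1) << iByte;
           return fMask;
       }
*/
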
13087/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13088FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13089{
13090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13091 /* Docs say register only. */
13092 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13093 {
13094 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
13095 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13096 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13098 IEM_MC_ARG(uint64_t *, puDst, 0);
13099 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13100 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13101 IEM_MC_PREPARE_FPU_USAGE();
13102 IEM_MC_FPU_TO_MMX_MODE();
13103
13104 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13105 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13106 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13107
13108 IEM_MC_ADVANCE_RIP_AND_FINISH();
13109 IEM_MC_END();
13110 }
13111 else
13112 IEMOP_RAISE_INVALID_OPCODE_RET();
13113}
13114
13115
13116 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13117FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13118{
13119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13120 /* Docs say register only. */
13121 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13122 {
13123 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
13124 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13125 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13128 IEM_MC_PREPARE_SSE_USAGE();
13129 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13130 IEM_MC_LIVENESS_GREG_CLOBBER(IEM_GET_MODRM_REG(pVCpu, bRm));
13131 IEM_MC_LIVENESS_XREG_INPUT(IEM_GET_MODRM_RM(pVCpu, bRm));
13132 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13133 } IEM_MC_NATIVE_ELSE() {
13134 IEM_MC_ARG(uint64_t *, puDst, 0);
13135 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13136 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13137 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13138 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13139 } IEM_MC_NATIVE_ENDIF();
13140 IEM_MC_ADVANCE_RIP_AND_FINISH();
13141 IEM_MC_END();
13142 }
13143 else
13144 IEMOP_RAISE_INVALID_OPCODE_RET();
13145}
13146
13147
13148/* Opcode 0xf3 0x0f 0xd7 - invalid */
13149/* Opcode 0xf2 0x0f 0xd7 - invalid */
13150
13151
13152/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13153FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13154{
13155 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13156 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13157}
13158
13159
13160/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13161FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13162{
13163 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13164 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13165}
13166
13167
13168/* Opcode 0xf3 0x0f 0xd8 - invalid */
13169/* Opcode 0xf2 0x0f 0xd8 - invalid */
13170
13171/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13172FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13173{
13174 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13175 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13176}
13177
13178
13179/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13180FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13181{
13182 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13183 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13184}
13185
13186
13187/* Opcode 0xf3 0x0f 0xd9 - invalid */
13188/* Opcode 0xf2 0x0f 0xd9 - invalid */
13189
13190/** Opcode 0x0f 0xda - pminub Pq, Qq */
13191FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13192{
13193 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13194 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13195}
13196
13197
13198/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13199FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13200{
13201 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13202 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13203}
13204
13205/* Opcode 0xf3 0x0f 0xda - invalid */
13206/* Opcode 0xf2 0x0f 0xda - invalid */
13207
13208/** Opcode 0x0f 0xdb - pand Pq, Qq */
13209FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13210{
13211 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13212 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13213}
13214
13215
13216/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13217FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13218{
13219 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13220 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13221}
13222
13223
13224/* Opcode 0xf3 0x0f 0xdb - invalid */
13225/* Opcode 0xf2 0x0f 0xdb - invalid */
13226
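/* The 'us' variants add with unsigned saturation: sums that would overflow
   clamp to the element maximum instead of wrapping.  Byte-lane sketch
   (illustrative helper):

       uint8_t paddusbRef(uint8_t uByte1, uint8_t uByte2)
       {
           unsigned const uSum = (unsigned)uByte1 + uByte2;
           return uSum > 255 ? (uint8_t)255 : (uint8_t)uSum;
       }
*/
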
13227/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13228FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13229{
13230 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13231 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13232}
13233
13234
13235/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13236FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13237{
13238 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13239 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13240}
13241
13242
13243/* Opcode 0xf3 0x0f 0xdc - invalid */
13244/* Opcode 0xf2 0x0f 0xdc - invalid */
13245
13246/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13247FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13248{
13249 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13250 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13251}
13252
13253
13254/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13255FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13256{
13257 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13258 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13259}
13260
13261
13262/* Opcode 0xf3 0x0f 0xdd - invalid */
13263/* Opcode 0xf2 0x0f 0xdd - invalid */
13264
13265/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13266FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13267{
13268 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13269 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13270}
13271
13272
13273 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13274FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13275{
13276 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13277 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13278}
13279
13280/* Opcode 0xf3 0x0f 0xde - invalid */
13281/* Opcode 0xf2 0x0f 0xde - invalid */
13282
13283
13284/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13285FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13286{
13287 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13288 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13289}
13290
13291
13292/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13293FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13294{
13295 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13296 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13297}
13298
13299
13300/* Opcode 0xf3 0x0f 0xdf - invalid */
13301/* Opcode 0xf2 0x0f 0xdf - invalid */
13302
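/* pavgb/pavgw compute a rounded unsigned average per element, i.e.
   (a + b + 1) / 2 with a wide enough intermediate.  Byte-lane sketch
   (illustrative helper):

       uint8_t pavgbRef(uint8_t uByte1, uint8_t uByte2)
       {
           return (uint8_t)(((unsigned)uByte1 + uByte2 + 1) >> 1);
       }
*/
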
13303/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13304FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13305{
13306 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13307 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13308}
13309
13310
13311/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13312FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13313{
13314 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13315 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13316}
13317
13318
13319/* Opcode 0xf3 0x0f 0xe0 - invalid */
13320/* Opcode 0xf2 0x0f 0xe0 - invalid */
13321
13322/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13323FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13324{
13325 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13326 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13327}
13328
13329
13330/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13331FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13332{
13333 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13334 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13335}
13336
13337
13338/* Opcode 0xf3 0x0f 0xe1 - invalid */
13339/* Opcode 0xf2 0x0f 0xe1 - invalid */
13340
13341/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13342FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13343{
13344 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13345 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13346}
13347
13348
13349/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13350FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13351{
13352 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13353 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13354}
13355
13356
13357/* Opcode 0xf3 0x0f 0xe2 - invalid */
13358/* Opcode 0xf2 0x0f 0xe2 - invalid */
13359
13360/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13361FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13362{
13363 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13364 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13365}
13366
13367
13368/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13369FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13370{
13371 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13372 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13373}
13374
13375
13376/* Opcode 0xf3 0x0f 0xe3 - invalid */
13377/* Opcode 0xf2 0x0f 0xe3 - invalid */
13378
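/* pmulhuw keeps the high 16 bits of the unsigned 16x16 -> 32-bit product per
   word lane (pmulhw is the signed counterpart, pmullw keeps the low half).
   Sketch (illustrative helper):

       uint16_t pmulhuwRef(uint16_t uWord1, uint16_t uWord2)
       {
           return (uint16_t)(((uint32_t)uWord1 * uWord2) >> 16);
       }
*/
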
13379/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13380FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13381{
13382 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13383 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13384}
13385
13386
13387/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13388FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13389{
13390 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13391 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13392}
13393
13394
13395/* Opcode 0xf3 0x0f 0xe4 - invalid */
13396/* Opcode 0xf2 0x0f 0xe4 - invalid */
13397
13398/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13399FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13400{
13401 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13402 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13403}
13404
13405
13406/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13407FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13408{
13409 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13410 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13411}
13412
13413
13414/* Opcode 0xf3 0x0f 0xe5 - invalid */
13415/* Opcode 0xf2 0x0f 0xe5 - invalid */
13416/* Opcode 0x0f 0xe6 - invalid */
13417
13418
13419/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13420FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13421{
13422 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13423 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13424}
13425
13426
13427/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13428FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13429{
13430 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13431 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13432}
13433
13434
13435/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13436FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13437{
13438 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13439 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13440}
13441
13442
13443/**
13444 * @opcode 0xe7
13445 * @opcodesub !11 mr/reg
13446 * @oppfx none
13447 * @opcpuid sse
13448 * @opgroup og_sse1_cachect
13449 * @opxcpttype none
13450 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13451 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13452 */
13453FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13454{
13455 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13457 if (IEM_IS_MODRM_MEM_MODE(bRm))
13458 {
13459 /* Register, memory. */
13460 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13461 IEM_MC_LOCAL(uint64_t, uSrc);
13462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13463
13464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13466 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13467 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13468 IEM_MC_FPU_TO_MMX_MODE();
13469
13470 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13471 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13472
13473 IEM_MC_ADVANCE_RIP_AND_FINISH();
13474 IEM_MC_END();
13475 }
13476 /**
13477 * @opdone
13478 * @opmnemonic ud0fe7reg
13479 * @opcode 0xe7
13480 * @opcodesub 11 mr/reg
13481 * @oppfx none
13482 * @opunused immediate
13483 * @opcpuid sse
13484 * @optest ->
13485 */
13486 else
13487 IEMOP_RAISE_INVALID_OPCODE_RET();
13488}
13489
13490/**
13491 * @opcode 0xe7
13492 * @opcodesub !11 mr/reg
13493 * @oppfx 0x66
13494 * @opcpuid sse2
13495 * @opgroup og_sse2_cachect
13496 * @opxcpttype 1
13497 * @optest op1=-1 op2=2 -> op1=2
13498 * @optest op1=0 op2=-42 -> op1=-42
13499 */
13500FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13501{
13502 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13504 if (IEM_IS_MODRM_MEM_MODE(bRm))
13505 {
13506 /* Register, memory. */
13507 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13508 IEM_MC_LOCAL(RTUINT128U, uSrc);
13509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13510
13511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13515
13516 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13517 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13518
13519 IEM_MC_ADVANCE_RIP_AND_FINISH();
13520 IEM_MC_END();
13521 }
13522
13523 /**
13524 * @opdone
13525 * @opmnemonic ud660fe7reg
13526 * @opcode 0xe7
13527 * @opcodesub 11 mr/reg
13528 * @oppfx 0x66
13529 * @opunused immediate
13530 * @opcpuid sse
13531 * @optest ->
13532 */
13533 else
13534 IEMOP_RAISE_INVALID_OPCODE_RET();
13535}
13536
13537/* Opcode 0xf3 0x0f 0xe7 - invalid */
13538/* Opcode 0xf2 0x0f 0xe7 - invalid */
13539
13540
13541/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13542FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13543{
13544 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13545 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13546}
13547
13548
13549/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13550FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13551{
13552 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13553 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13554}
13555
13556
13557/* Opcode 0xf3 0x0f 0xe8 - invalid */
13558/* Opcode 0xf2 0x0f 0xe8 - invalid */
13559
13560/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13561FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13562{
13563 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13564 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
13565}
13566
13567
13568/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13569FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13570{
13571 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13572 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
13573}
13574
13575
13576/* Opcode 0xf3 0x0f 0xe9 - invalid */
13577/* Opcode 0xf2 0x0f 0xe9 - invalid */
13578
13579
13580/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13581FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13582{
13583 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13584 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
13585}
13586
13587
13588/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13589FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13590{
13591 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13592 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
13593}
13594
13595
13596/* Opcode 0xf3 0x0f 0xea - invalid */
13597/* Opcode 0xf2 0x0f 0xea - invalid */
13598
13599
13600/** Opcode 0x0f 0xeb - por Pq, Qq */
13601FNIEMOP_DEF(iemOp_por_Pq_Qq)
13602{
13603 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13604 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
13605}
13606
13607
13608/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13609FNIEMOP_DEF(iemOp_por_Vx_Wx)
13610{
13611 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13612 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13613}
13614
13615
13616/* Opcode 0xf3 0x0f 0xeb - invalid */
13617/* Opcode 0xf2 0x0f 0xeb - invalid */
13618
13619/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13620FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13621{
13622 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13623 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
13624}
13625
13626
13627/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13628FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13629{
13630 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13631 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
13632}
13633
13634
13635/* Opcode 0xf3 0x0f 0xec - invalid */
13636/* Opcode 0xf2 0x0f 0xec - invalid */
13637
13638/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13639FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13640{
13641 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13642 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
13643}
13644
13645
13646/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13647FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13648{
13649 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13650 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
13651}
13652
13653
13654/* Opcode 0xf3 0x0f 0xed - invalid */
13655/* Opcode 0xf2 0x0f 0xed - invalid */
13656
13657
13658/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13659FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13660{
13661 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13662 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13663}
13664
13665
13666/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13667FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13668{
13669 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13670 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13671}
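
/*
 * Illustration (not part of the decoder): the MMX forms of pminsw/pmaxsw
 * arrived with SSE (and AMD's MMX extensions), which is why they decode via
 * iemOpCommonMmxSseOpt_FullFull_To_Full rather than the plain MMX helper.
 * The per-lane operation is just a signed 16-bit compare; the helper name
 * below is made up:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static int16_t MaxS16Lane(int16_t iA, int16_t iB)
{
    return iA >= iB ? iA : iB; /* pminsw keeps the smaller lane instead */
}
#endif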
13672
13673
13674/* Opcode 0xf3 0x0f 0xee - invalid */
13675/* Opcode 0xf2 0x0f 0xee - invalid */
13676
13677
13678/** Opcode 0x0f 0xef - pxor Pq, Qq */
13679FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13680{
13681 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13682 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
13683}
13684
13685
13686/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13687FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13688{
13689 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13690 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13691}
13692
13693
13694/* Opcode 0xf3 0x0f 0xef - invalid */
13695/* Opcode 0xf2 0x0f 0xef - invalid */
13696
13697/* Opcode 0x0f 0xf0 - invalid */
13698/* Opcode 0x66 0x0f 0xf0 - invalid */
13699
13700
13701/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13702FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13703{
13704 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13706 if (IEM_IS_MODRM_REG_MODE(bRm))
13707 {
13708 /*
13709 * Register, register - (not implemented, assuming it raises \#UD).
13710 */
13711 IEMOP_RAISE_INVALID_OPCODE_RET();
13712 }
13713 else
13714 {
13715 /*
13716 * Register, memory.
13717 */
13718 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13719 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13721
13722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13724 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13725 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13726 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13727 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13728
13729 IEM_MC_ADVANCE_RIP_AND_FINISH();
13730 IEM_MC_END();
13731 }
13732}
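
/*
 * Illustration (not part of the decoder): unlike movdqa, lddqu never faults
 * on a misaligned address, which is why the handler above fetches with
 * IEM_MC_FETCH_MEM_U128_NO_AC.  A minimal guest-side sketch using the SSE3
 * intrinsic _mm_lddqu_si128:
 */
#if 0 /* illustrative only */
# include <pmmintrin.h> /* SSE3: _mm_lddqu_si128 (pulls in __m128i) */
static __m128i LoadUnaligned(void const *pvSrc)
{
    return _mm_lddqu_si128((__m128i const *)pvSrc); /* lddqu xmm, [pvSrc] */
}
#endif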
13733
13734
13735/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13736FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13737{
13738 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13739 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13740}
13741
13742
13743/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13744FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13745{
13746 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13747 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13748}
13749
13750
13751/* Opcode 0xf2 0x0f 0xf1 - invalid */
13752
13753/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13754FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13755{
13756 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13757 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13758}
13759
13760
13761/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13762FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13763{
13764 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13765 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13766}
13767
13768
13769/* Opcode 0xf2 0x0f 0xf2 - invalid */
13770
13771/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13772FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13773{
13774 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13775 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13776}
13777
13778
13779/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13780FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13781{
13782 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13783 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13784}
13785
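/*
 * Illustration (not part of the decoder): psllw/pslld/psllq take the shift
 * count from the full low 64 bits of the second operand, not modulo the
 * element width; any larger count zeroes every lane.  A minimal per-word
 * sketch with a made-up helper name:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static uint16_t ShlWordLane(uint16_t uVal, uint64_t uCount)
{
    return uCount <= 15 ? (uint16_t)(uVal << uCount) : 0; /* counts > 15 clear the lane */
}
#endif
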
13786/* Opcode 0xf2 0x0f 0xf3 - invalid */
13787
13788/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13789FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13790{
13791 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13792 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13793}
13794
13795
13796/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13797FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13798{
13799 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13800 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13801}
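
/*
 * Illustration (not part of the decoder): pmuludq multiplies the low
 * (even-indexed) unsigned 32-bit lanes into full 64-bit products -- one
 * product for the MMX form, two for the SSE2 form.  A minimal per-lane
 * sketch:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static uint64_t MulUDqLane(uint32_t uA, uint32_t uB)
{
    return (uint64_t)uA * uB; /* 32x32 -> 64, no truncation */
}
#endif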
13802
13803
13804/* Opcode 0xf2 0x0f 0xf4 - invalid */
13805
13806/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13807FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13808{
13809 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13810 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13811}
13812
13813
13814/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13815FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13816{
13817 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13818 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13819}
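
/*
 * Illustration (not part of the decoder): pmaddwd multiplies signed 16-bit
 * lane pairs and sums adjacent products into 32-bit results; the sum can
 * only wrap for the pattern -0x8000 * -0x8000 twice, giving 0x80000000.
 * A minimal sketch for one destination dword:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static int32_t MAddWdLane(int16_t iA0, int16_t iB0, int16_t iA1, int16_t iB1)
{
    /* Each product fits in int32; sum in uint32 so the lone wrap case is well defined. */
    uint32_t const uSum = (uint32_t)((int32_t)iA0 * iB0) + (uint32_t)((int32_t)iA1 * iB1);
    return (int32_t)uSum;
}
#endif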
13820
13821/* Opcode 0xf2 0x0f 0xf5 - invalid */
13822
13823/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13824FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13825{
13826 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13827 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13828}
13829
13830
13831/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13832FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13833{
13834 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13835 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13836}
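
/*
 * Illustration (not part of the decoder): psadbw sums the absolute
 * differences of eight byte pairs and zero-extends the 16-bit result into
 * the destination qword (the SSE2 form does this once per 64-bit half).
 * A minimal sketch for one group of eight bytes:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static uint16_t SadBwGroup(uint8_t const *pabA, uint8_t const *pabB)
{
    uint16_t uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pabA[i] >= pabB[i] ? (uint8_t)(pabA[i] - pabB[i])
                                   : (uint8_t)(pabB[i] - pabA[i]);
    return uSum; /* at most 8 * 255 = 2040 */
}
#endif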
13837
13838
13839/* Opcode 0xf2 0x0f 0xf6 - invalid */
13840
13841/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13842FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
13843{
13844// IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13846 if (IEM_IS_MODRM_REG_MODE(bRm))
13847 {
13848 /*
13849 * MMX, MMX, (implicit) [ ER]DI
13850 */
13851 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13853 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13854 IEM_MC_LOCAL( uint64_t, u64Mem);
13855 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
13856 IEM_MC_ARG( uint64_t const *, puSrc, 1);
13857 IEM_MC_ARG( uint64_t const *, puMsk, 2);
13858 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13859 IEM_MC_PREPARE_FPU_USAGE();
13860 IEM_MC_FPU_TO_MMX_MODE();
13861
13862 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13863 IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13864 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
13865 IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
13866 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
13867 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);
13868
13869 IEM_MC_ADVANCE_RIP_AND_FINISH();
13870 IEM_MC_END();
13871 }
13872 else
13873 {
13874 /* The memory, register encoding is invalid. */
13875 IEMOP_RAISE_INVALID_OPCODE_RET();
13876 }
13877}
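
/*
 * Illustration (not part of the decoder): maskmovq stores only the source
 * bytes whose mask byte has bit 7 set, at the implicit [rDI/eDI] address.
 * The emulation above realizes this as fetch + merge (iemAImpl_maskmovq_u64)
 * + store of the whole qword.  A minimal per-byte sketch:
 */
#if 0 /* illustrative only */
# include <stdint.h>
static void MaskMovQRef(uint8_t *pbDst /* [rDI] */, uint8_t const *pbSrc, uint8_t const *pbMsk)
{
    for (unsigned i = 0; i < 8; i++)
        if (pbMsk[i] & 0x80) /* only the mask byte's top bit selects the store */
            pbDst[i] = pbSrc[i];
}
#endif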
13878
13879
13880/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13881FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
13882{
13883// IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13885 if (IEM_IS_MODRM_REG_MODE(bRm))
13886 {
13887 /*
13888 * XMM, XMM, (implicit) [ ER]DI
13889 */
13890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13892 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13893 IEM_MC_LOCAL( RTUINT128U, u128Mem);
13894 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
13895 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
13896 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
13897 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13898 IEM_MC_PREPARE_SSE_USAGE();
13899
13900 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13901 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13902 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13903 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
13904 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
13905 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
13906
13907 IEM_MC_ADVANCE_RIP_AND_FINISH();
13908 IEM_MC_END();
13909 }
13910 else
13911 {
13912 /* The memory, register encoding is invalid. */
13913 IEMOP_RAISE_INVALID_OPCODE_RET();
13914 }
13915}
13916
13917
13918/* Opcode 0xf2 0x0f 0xf7 - invalid */
13919
13920
13921/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13922FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13923{
13924 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13925 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
13926}
13927
13928
13929/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13930FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13931{
13932 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13933 SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13934}
13935
13936
13937/* Opcode 0xf2 0x0f 0xf8 - invalid */
13938
13939
13940/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13941FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13942{
13943 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13944 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
13945}
13946
13947
13948/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13949FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13950{
13951 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13952 SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13953}
13954
13955
13956/* Opcode 0xf2 0x0f 0xf9 - invalid */
13957
13958
13959/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13960FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13961{
13962 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13963 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
13964}
13965
13966
13967/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13968FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13969{
13970 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13971 SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13972}
13973
13974
13975/* Opcode 0xf2 0x0f 0xfa - invalid */
13976
13977
13978/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13979FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13980{
13981 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13982 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13983}
13984
13985
13986/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13987FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13988{
13989 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13990 SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13991}
13992
13993
13994/* Opcode 0xf2 0x0f 0xfb - invalid */
13995
13996
13997/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13998FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13999{
14000 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14001 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
14002}
14003
14004
14005/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14006FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14007{
14008 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14009 SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14010}
14011
14012
14013/* Opcode 0xf2 0x0f 0xfc - invalid */
14014
14015
14016/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14017FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14018{
14019 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14020 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
14021}
14022
14023
14024/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14025FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14026{
14027 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14028 SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14029}
14030
14031
14032/* Opcode 0xf2 0x0f 0xfd - invalid */
14033
14034
14035/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14036FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14037{
14038 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14039 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
14040}
14041
14042
14043/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14044FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14045{
14046 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14047 SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14048}
14049
14050
14051/* Opcode 0xf2 0x0f 0xfe - invalid */
14052
14053
14054/** Opcode **** 0x0f 0xff - UD0 */
14055FNIEMOP_DEF(iemOp_ud0)
14056{
14057 IEMOP_MNEMONIC(ud0, "ud0");
14058 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14059 {
14060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14061 if (IEM_IS_MODRM_MEM_MODE(bRm))
14062 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14063 }
14064 IEMOP_HLP_DONE_DECODING();
14065 IEMOP_RAISE_INVALID_OPCODE_RET();
14066}
14067
14068
14069
14070/**
14071 * Two byte opcode map, first byte 0x0f.
14072 *
14073 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14074 * check if it needs updating as well when making changes.
14075 */
14076const PFNIEMOP g_apfnTwoByteMap[] =
14077{
14078 /* no prefix, 066h prefix f3h prefix, f2h prefix */
14079 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14080 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14081 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14082 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14083 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14084 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14085 /* 0x06 */ IEMOP_X4(iemOp_clts),
14086 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14087 /* 0x08 */ IEMOP_X4(iemOp_invd),
14088 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14089 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14090 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14091 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14092 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14093 /* 0x0e */ IEMOP_X4(iemOp_femms),
14094 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14095
14096 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14097 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14098 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14099 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14103 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14104 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14105 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14106 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14107 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14108 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14109 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14110 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14111 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14112
14113 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14114 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14115 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14116 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14117 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14118 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14119 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14120 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14121 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14124 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14125 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14126 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14127 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129
14130 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14131 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14132 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14133 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14134 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14135 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14136 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14137 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14138 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14139 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14140 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14141 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14142 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14143 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14144 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14145 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14146
14147 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14148 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14149 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14150 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14151 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14152 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14153 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14154 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14155 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14156 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14157 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14158 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14159 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14160 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14161 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14162 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14163
14164 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14165 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14166 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14167 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14168 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14173 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14174 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14175 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14176 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14177 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14178 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14179 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14180
14181 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14188 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14197
14198 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14199 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14200 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14201 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14202 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14203 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14204 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14205 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14206
14207 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14208 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14209 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14210 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14211 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14212 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14213 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14214 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14215
14216 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14217 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14218 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14219 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14220 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14221 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14222 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14223 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14224 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14225 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14226 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14227 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14228 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14229 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14230 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14231 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14232
14233 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14234 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14235 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14236 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14237 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14238 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14239 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14240 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14241 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14242 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14243 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14244 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14245 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14246 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14247 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14248 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14249
14250 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14251 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14252 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14253 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14254 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14255 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14256 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14257 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14258 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14259 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14260 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14261 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14262 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14263 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14264 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14265 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14266
14267 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14268 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14269 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14270 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14271 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14272 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14273 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14274 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14275 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14276 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14277 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14278 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14279 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14280 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14281 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14282 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14283
14284 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14285 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14286 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14287 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14288 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14289 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14290 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14291 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14292 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14293 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14294 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14295 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14296 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14297 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14298 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14299 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14300
14301 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14302 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14308 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317
14318 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14325 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334
14335 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14336 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14342 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14343 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14345 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14346 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14347 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14348 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14349 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14350 /* 0xff */ IEMOP_X4(iemOp_ud0),
14351};
14352AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14353
14354/** @} */
14355