VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 104195

Last change on this file since 104195 was 104195, checked in by vboxsync, 8 months ago

VMM/IEM: Refactoring assembly helpers to not pass eflags by reference but instead by value and return the updated value (via eax/w0) - first chunk: ADD,ADC,SUB,SBB,CMP,TEST,AND,OR,XOR. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 506.9 KB
Line 
1/* $Id: IEMAllInstTwoByte0f.cpp.h 104195 2024-04-05 14:45:23Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the MMX CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Switch the FPU/x87 unit into MMX mode. */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* Mark the destination MMX register as modified. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address calculation comes first (presumably it consumes any
           remaining SIB/displacement opcode bytes), then the done-decoding check
           - NOTE(review): confirm this ordering requirement against the MC spec. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
97
98
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix; requires either the SSE or the AMD MMX extensions CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Switch the FPU/x87 unit into MMX mode. */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* Mark the destination MMX register as modified. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks (same ordering as the
           other MMX workers in this file). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
159
160
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Switch the FPU/x87 unit into MMX mode. */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* Mark the destination MMX register as modified. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
219
220
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch - enforces the 128-bit SSE operand alignment (see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
276
277
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch - enforces the 128-bit SSE operand alignment (see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
333
334
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the MMX CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Switch the FPU/x87 unit into MMX mode. */

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* Mark the destination MMX register as modified. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* 32-bit memory operand, zero-extended to the 64-bit local. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
393
394
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* NOTE(review): sibling workers in this file use IEM_MC_PREPARE_SSE_USAGE()
           here; verify whether ACTUALIZE_SSE_STATE_FOR_CHANGE is intentional. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
453
454
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* NOTE(review): sibling workers in this file use IEM_MC_PREPARE_SSE_USAGE()
           here; verify whether ACTUALIZE_SSE_STATE_FOR_CHANGE is intentional. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
513
514
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the MMX CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* Switch the FPU/x87 unit into MMX mode. */

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* Mark the destination MMX register as modified. */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, then the decoding checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
575
576
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
635
636
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        /* Result goes into a local first so it is only committed after the
           SIMD FP exception check below. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch - enforces the 128-bit SSE operand alignment (see function docs). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
699
700
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        /* Result goes into a local first so it is only committed after the
           SIMD FP exception check below. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Second operand is only the low 32-bit float of the source register. */
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 32-bit float memory operand. */
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
763
764
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        /* Result goes into a local first so it is only committed after the
           SIMD FP exception check below. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch - enforces the 128-bit SSE operand alignment (see function docs). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
827
828
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        /* Result goes into a local first so it is only committed after the
           SIMD FP exception check below. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Second operand is only the low 64-bit float of the source register. */
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 64-bit float memory operand. */
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
891
892
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        /* No lock prefix allowed; requires the SSE2 CPUID feature. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
951
952
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx      xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * The result is computed into a local X86XMMREG and only committed to the
 * destination register after the SIMD FP exception check, so a raised \#XF/\#UD
 * leaves the destination unmodified.
 *
 * @sa  iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3); /* SSE3 CPUID check, no LOCK prefix. */
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* Check for pending SIMD FP exceptions before committing the result. */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch (exception type 2 semantics). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1015
1016
/** Opcode 0x0f 0x00 /0. SLDT - store the local descriptor table register.
 *  Register form honors the operand size; memory form always writes 16 bits. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* LDTR does not exist in real/V86 mode -> #UD. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Clobbers the destination GPR; may VM-exit under nested virt. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1040
1041
/** Opcode 0x0f 0x00 /1. STR - store the task register.
 *  Register form honors the operand size; memory form always writes 16 bits. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* TR does not exist in real/V86 mode -> #UD. */


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Clobbers the destination GPR; may VM-exit under nested virt. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1066
1067
/** Opcode 0x0f 0x00 /2. LLDT - load the local descriptor table register.
 *  Fetches a 16-bit selector from a GPR or memory and hands it to the
 *  privileged C implementation. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check done before the memory fetch for the memory form. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}
1097
1098
/** Opcode 0x0f 0x00 /3. LTR - load the task register.
 *  Same structure as LLDT: 16-bit selector from GPR or memory, deferred to
 *  the privileged C implementation. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check done before the memory fetch for the memory form. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}
1128
1129
/* Need to associate flag info with the blocks, so duplicate the code. */
/**
 * Common body for VERR (group 6 /4) and VERW (group 6 /5).
 *
 * Fetches the 16-bit selector operand from a register or memory and defers to
 * iemCImpl_VerX, which only updates ZF (hence IEM_CIMPL_F_STATUS_FLAGS).
 *
 * @param   bRm     The ModR/M byte.
 * @param   fWrite  false for VERR (verify read), true for VERW (verify write).
 */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        /* Register form, so report IEMOPFORM_M_REG (was M_MEM, inconsistent with sldt/str/lldt). */ \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0
1157
/**
 * @opmaps      grp6
 * @opcode      /4
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    /* Shared VERR/VERW body; false = verify for read. */
    IEMOP_BODY_GRP6_VERX(bRm, false);
}
1168
1169
/**
 * @opmaps      grp6
 * @opcode      /5
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    /* Shared VERR/VERW body; true = verify for write. */
    IEMOP_BODY_GRP6_VERX(bRm, true);
}
1180
1181
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1196
/** Opcode 0x0f 0x00. Dispatches group 6 on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1203
1204
/** Opcode 0x0f 0x01 /0. SGDT - store the GDTR to memory (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1219
1220
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1). VMCALL - Intel VMX hypercall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}
1233
1234
/** Opcode 0x0f 0x01 /0 (mod=3, rm=2). VMLAUNCH - VMX VM entry; only
 *  implemented when nested VMX is compiled in, otherwise raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM entry is a far indirect branch that changes mode/flags and ends the TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1254
1255
/** Opcode 0x0f 0x01 /0 (mod=3, rm=3). VMRESUME - VMX VM entry; only
 *  implemented when nested VMX is compiled in, otherwise raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM entry is a far indirect branch that changes mode/flags and ends the TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1275
1276
/** Opcode 0x0f 0x01 /0 (mod=3, rm=4). VMXOFF - leave VMX operation; only
 *  implemented when nested VMX is compiled in, otherwise raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1294
1295
/** Opcode 0x0f 0x01 /1. SIDT - store the IDTR to memory (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1310
1311
/** Opcode 0x0f 0x01 /1 (mod=3, rm=0). MONITOR - arm address monitoring.
 *  The effective segment is passed on since DS:RAX/EAX/AX is segment-prefixable. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1319
1320
/** Opcode 0x0f 0x01 /1 (mod=3, rm=1). MWAIT - wait on a monitored address.
 *  Ends the translation block since execution may halt. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}
1328
1329
/** Opcode 0x0f 0x01 /2. LGDT - load the GDTR from memory (memory form only).
 *  The effective operand size is forwarded to the C implementation, which
 *  determines how many base-address bytes are read. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1344
1345
/** Opcode 0x0f 0x01 0xd0. XGETBV - read an extended control register into
 *  EDX:EAX; \#UD unless the guest CPU exposes XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        /* Clobbers RAX and RDX (result is returned in EDX:EAX). */
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1365
1366
/** Opcode 0x0f 0x01 0xd1. XSETBV - write an extended control register from
 *  EDX:EAX; \#UD unless the guest CPU exposes XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1383
1384
/** Opcode 0x0f 0x01 /3. LIDT - load the IDTR from memory (memory form only).
 *  In 64-bit code the operand size is forced to 64-bit regardless of prefixes. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1399
1400
/** Opcode 0x0f 0x01 0xd8. VMRUN - AMD SVM world switch; \#UD stub when nested
 *  SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    /* World switch: far indirect branch, mode/flags change, TB ends. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1414
/** Opcode 0x0f 0x01 0xd9. VMMCALL - AMD hypercall; always decoded so GIM can
 *  service hypercalls regardless of guest SVM support. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumtion
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}
1430
/** Opcode 0x0f 0x01 0xda. VMLOAD - load guest state from a VMCB; \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1442
1443
/** Opcode 0x0f 0x01 0xdb. VMSAVE - save guest state to a VMCB; \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1455
1456
/** Opcode 0x0f 0x01 0xdc. STGI - set the global interrupt flag; \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1468
1469
/** Opcode 0x0f 0x01 0xdd. CLGI - clear the global interrupt flag; \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1481
1482
/** Opcode 0x0f 0x01 0xdf. INVLPGA - invalidate a TLB entry by ASID; \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1494
1495
/** Opcode 0x0f 0x01 0xde. SKINIT - secure kernel init; \#UD stub when nested
 *  SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1507
1508
/** Opcode 0x0f 0x01 /4. SMSW - store the machine status word (CR0 low word).
 *  Register form honors the operand size; memory form always writes 16 bits. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Clobbers the destination GPR; may VM-exit under nested virt. */
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1530
1531
/** Opcode 0x0f 0x01 /6. LMSW - load the machine status word into CR0.
 *  The guest address (or NIL for the register form) is passed along for
 *  SVM intercept reporting by the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        /* No memory operand in the register form -> NIL pointer. */
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Writes CR0 and can change the CPU mode. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1563
1564
/** Opcode 0x0f 0x01 /7. INVLPG - invalidate the TLB entry for an address
 *  (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    /* NOTE(review): decode check above requires 486+, but the MC block is
       tagged IEM_MC_F_MIN_386 - confirm the flag mismatch is intentional. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}
1577
1578
/** Opcode 0x0f 0x01 0xf8. SWAPGS - exchange the GS base with MSR_KERNEL_GS_BASE
 *  (64-bit mode only). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT(); /* #UD outside long 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Clobbers the GS segment base. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}
1587
1588
/** Opcode 0x0f 0x01 0xf9. RDTSCP - read the TSC into EDX:EAX and
 *  IA32_TSC_AUX into ECX. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Clobbers RAX, RDX and RCX. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                iemCImpl_rdtscp);
}
1600
1601
/**
 * Group 7 jump table, memory variant (mod != 3), indexed by the ModR/M
 * reg field (/0../7). Register forms are dispatched in iemOp_Grp7 itself.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1616
1617
/** Opcode 0x0f 0x01. Group 7 dispatcher: memory forms go via the jump table,
 *  register forms (mod=3) select individual instructions on reg and rm. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX instruction group. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 1: /* MONITOR/MWAIT. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 2: /* XGETBV/XSETBV. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 3: /* AMD SVM instruction group; all eight rm values assigned. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 6: /* LMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS/RDTSCP. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1687
/** Common worker for LAR and LSL (Gv, Ew): fetches a 16-bit selector from a
 *  GPR or memory and defers to the LarLsl C implementation, which writes the
 *  destination register and updates ZF. 32/64-bit operand sizes share the
 *  u64 path via a 64-bit register reference. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* #UD in real and V86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Only updates status flags (ZF) and the destination GPR. */
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
                                    iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1777
1778
1779
/**
 * @opcode      0x02
 * @opflmodify  zf
 * LAR - load access rights byte; thin wrapper over the shared LAR/LSL worker.
 */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1789
1790
/**
 * @opcode      0x03
 * @opflmodify  zf
 * LSL - load segment limit; thin wrapper over the shared LAR/LSL worker.
 */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1800
1801
/** Opcode 0x0f 0x05. SYSCALL - fast system call (far branch to the kernel). */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
}
1812
1813
/** Opcode 0x0f 0x06. CLTS - clear the task-switched flag (CR0.TS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Clobbers CR0; may VM-exit under nested virt. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
}
1821
1822
/** Opcode 0x0f 0x07. SYSRET - return from fast system call; the effective
 *  operand size selects the return mode. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
}
1832
1833
/** Opcode 0x0f 0x08. INVD - invalidate internal caches without write-back. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
}
1842
1843
/** Opcode 0x0f 0x09. WBINVD - write back and invalidate internal caches. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
}
1852
1853
/** Opcode 0x0f 0x0b. UD2 - the architecturally defined invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1860
/** Opcode 0x0f 0x0d. AMD 3DNow! PREFETCH/PREFETCHW group; Intel treats it as
 *  NOP Ev. Raises \#UD when the CPU supports neither long mode nor the
 *  3DNowPrefetch feature, or for the register form. Currently emulated as a
 *  no-op after the effective address calculation. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Only memory operands are defined for this group. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    IEM_MC_NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
1900
1901
/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Switch the FPU state out of MMX mode (3DNow! fast EMMS). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
1916
1917
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! instructions are only valid when the CPU profile has the feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    /* 3DNow! puts the real opcode in the imm8 byte after the operands. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
1936
1937
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Register form: straight 128-bit register to register copy. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        /* Memory form: fetch 128 bits (unaligned OK, per the _NO_AC fetch) and store to the register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

}
1987
1988
/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Register form: straight 128-bit register to register copy. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        /* Memory form: fetch 128 bits (unaligned OK, per the _NO_AC fetch) and store to the register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2037
2038
/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        /* Register form: only the low dword is copied, upper bits of the destination are preserved. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        /* Memory form: the 32-bit value is zero extended to the full 128-bit register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2090
2091
/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        /* Register form: only the low qword is copied, the high qword of the destination is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        /* Memory form: the 64-bit value is zero extended to the full 128-bit register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2143
2144
/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Register form: note the MR direction - rm is the destination, reg the source. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        /* Memory form: 128-bit store, unaligned access allowed (_NO_AC). */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2193
2194
/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Register form: note the MR direction - rm is the destination, reg the source. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        /* Memory form: 128-bit store, unaligned access allowed (_NO_AC). */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2243
2244
/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        /* Register form: only the low dword moves; upper bits of the destination are preserved. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM32.
         */
        /* Memory form: store the low dword of the source register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2296
2297
/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        /* Register form: only the low qword moves; the destination's high qword is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], XMM64.
         */
        /* Memory form: store the low qword of the source register. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2349
2350
/** Opcode 0x0f 0x12: register form decodes as MOVHLPS, memory form as MOVLPS. */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHLPS: copy the high qword of the source into the low qword of the destination. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLPS: load 64 bits from memory into the low qword; the high qword is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2412
2413
/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Load 64 bits from memory into the low qword; the high qword is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2460
2461
/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplicate the even source dwords: dword 0 -> dwords 0,1; dword 2 -> dwords 2,3. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory source must be 16-byte aligned (ALIGN_SSE fetch); then duplicate the even dwords. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2520
2521
/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplicate the low source qword into both destination qwords. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplicate the 64-bit memory value into both destination qwords. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2576
2577
/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the low qword of the source register to memory. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2624
2625
/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the low qword of the source register to memory. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2672
2673
2674/**
2675 * @opmnemonic udf30f13
2676 * @opcode 0x13
2677 * @oppfx 0xf3
2678 * @opunused intel-modrm
2679 * @opcpuid sse
2680 * @optest ->
2681 * @opdone
2682 */
2683
2684/**
2685 * @opmnemonic udf20f13
2686 * @opcode 0x13
2687 * @oppfx 0xf2
2688 * @opunused intel-modrm
2689 * @opcpuid sse
2690 * @optest ->
2691 * @opdone
2692 */
2693
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and exception handling are shared with the other low-low unpack forms. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2700
2701
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and exception handling are shared with the other low-low unpack forms (SSE2 variant). */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2708
2709
2710/**
2711 * @opdone
2712 * @opmnemonic udf30f14
2713 * @opcode 0x14
2714 * @oppfx 0xf3
2715 * @opunused intel-modrm
2716 * @opcpuid sse
2717 * @optest ->
2718 * @opdone
2719 */
2720
2721/**
2722 * @opmnemonic udf20f14
2723 * @opcode 0x14
2724 * @oppfx 0xf2
2725 * @opunused intel-modrm
2726 * @opcpuid sse
2727 * @optest ->
2728 * @opdone
2729 */
2730
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and exception handling are shared with the other high-high unpack forms. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2737
2738
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and exception handling are shared with the other high-high unpack forms (SSE2 variant). */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2745
2746
2747/* Opcode 0xf3 0x0f 0x15 - invalid */
2748/* Opcode 0xf2 0x0f 0x15 - invalid */
2749
2750/**
2751 * @opdone
2752 * @opmnemonic udf30f15
2753 * @opcode 0x15
2754 * @oppfx 0xf3
2755 * @opunused intel-modrm
2756 * @opcpuid sse
2757 * @optest ->
2758 * @opdone
2759 */
2760
2761/**
2762 * @opmnemonic udf20f15
2763 * @opcode 0x15
2764 * @oppfx 0xf2
2765 * @opunused intel-modrm
2766 * @opcpuid sse
2767 * @optest ->
2768 * @opdone
2769 */
2770
/** Opcode 0x0f 0x16: register form decodes as MOVLHPS, memory form as MOVHPS. */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLHPS: copy the low qword of the source into the high qword of the destination. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHPS: load 64 bits from memory into the high qword; the low qword is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2832
2833
/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Load 64 bits from memory into the high qword; the low qword is preserved. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2880
2881
/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplicate the odd source dwords: dword 1 -> dwords 0,1; dword 3 -> dwords 2,3. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory source must be 16-byte aligned (ALIGN_SSE fetch); then duplicate the odd dwords. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2940
2941/**
2942 * @opdone
2943 * @opmnemonic udf30f16
2944 * @opcode 0x16
2945 * @oppfx 0xf2
2946 * @opunused intel-modrm
2947 * @opcpuid sse
2948 * @optest ->
2949 * @opdone
2950 */
2951
2952
/**
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the high qword of the source register to memory. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2999
3000
3001/**
3002 * @opcode 0x17
3003 * @opcodesub !11 mr/reg
3004 * @oppfx 0x66
3005 * @opcpuid sse2
3006 * @opgroup og_sse2_pcksclr_datamove
3007 * @opxcpttype 5
3008 * @optest op1=1 op2=2 -> op1=2
3009 * @optest op1=0 op2=-42 -> op1=-42
3010 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    /* Store the high quadword of an XMM register to memory (movhpd m64, xmm). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be decoded before the done-decoding marker. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Second qword (a_iQword=1) of the source register goes to memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* register form is undefined */
}
3047
3048
3049/**
3050 * @opdone
3051 * @opmnemonic udf30f17
3052 * @opcode 0x17
3053 * @oppfx 0xf3
3054 * @opunused intel-modrm
3055 * @opcpuid sse
3056 * @optest ->
3057 * @opdone
3058 */
3059
3060/**
3061 * @opmnemonic udf20f17
3062 * @opcode 0x17
3063 * @oppfx 0xf2
3064 * @opunused intel-modrm
3065 * @opcpuid sse
3066 * @optest ->
3067 * @opdone
3068 */
3069
3070
3071/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /*
     * Prefetch hints (group 16). Decoded by the /r field of the ModR/M byte;
     * emulated as a NOP after address decoding. Register operands are #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* only 3 bits, all covered above */
        }

        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        IEM_MC_NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3102
3103
3104/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /*
     * Multi-byte NOP (0x0f 0x19..0x1f). The operand is fully decoded
     * (including the effective address for memory forms) but not accessed.
     */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: address decoding can still fault (e.g. #SS/#GP on the
           effective address calculation), so it must be performed. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        IEM_MC_NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3128
3129
3130/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /*
     * Read a control register into a general register (mov Rd/Rq, CRx).
     * Defers to iemCImpl_mov_Rd_Cd; may cause a VM-exit under VT-x/AMD-V.
     */
    /* mod is ignored, as is operand size overrides. */
/** @todo testcase: check memory encoding. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only the architecturally defined control registers are accessible. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3164
3165
3166/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /*
     * Read a debug register into a general register (mov Rd/Rq, DRx).
     * Defers to iemCImpl_mov_Rd_Dd; may cause a VM-exit.
     */
/** @todo testcase: check memory encoding. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15 which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3180
3181
3182/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /*
     * Write a general register into a control register (mov CRx, Rd/Rq).
     * Defers to iemCImpl_mov_Cd_Rd; writing CR0/CR3/CR4 can change the
     * execution mode, hence the IEM_CIMPL_F_MODE flag on that path.
     */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only the architecturally defined control registers are accessible. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
    /* CR2/CR8 writes cannot change the execution mode; CR0/CR3/CR4 can. */
    if (iCrReg & (2 | 8))
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
    else
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3219
3220
3221/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /*
     * Write a general register into a debug register (mov DRx, Rd/Rq).
     * Defers to iemCImpl_mov_Dd_Rd; may change mode state and cause VM-exits.
     */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15 which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
                                iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3233
3234
3235/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /*
     * Read a test register (mov Rd, TRx). Test registers only exist on
     * pre-Pentium CPUs; on Pentium and later this opcode is #UD.
     */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3247
3248
3249/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /*
     * Write a test register (mov TRx, Rd). Test registers only exist on
     * pre-Pentium CPUs; on Pentium and later this opcode is #UD.
     */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3260
3261
3262/**
3263 * @opcode 0x28
3264 * @oppfx none
3265 * @opcpuid sse
3266 * @opgroup og_sse_simdfp_datamove
3267 * @opxcpttype 1
3268 * @optest op1=1 op2=2 -> op1=2
3269 * @optest op1=0 op2=-42 -> op1=-42
3270 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* Aligned 128-bit load: movaps xmm, xmm/m128 (memory must be 16-byte aligned). */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3310
3311/**
3312 * @opcode 0x28
3313 * @oppfx 66
3314 * @opcpuid sse2
3315 * @opgroup og_sse2_pcksclr_datamove
3316 * @opxcpttype 1
3317 * @optest op1=1 op2=2 -> op1=2
3318 * @optest op1=0 op2=-42 -> op1=-42
3319 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* Aligned 128-bit load: movapd xmm, xmm/m128 (SSE2; memory must be 16-byte aligned). */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3359
3360/* Opcode 0xf3 0x0f 0x28 - invalid */
3361/* Opcode 0xf2 0x0f 0x28 - invalid */
3362
3363/**
3364 * @opcode 0x29
3365 * @oppfx none
3366 * @opcpuid sse
3367 * @opgroup og_sse_simdfp_datamove
3368 * @opxcpttype 1
3369 * @optest op1=1 op2=2 -> op1=2
3370 * @optest op1=0 op2=-42 -> op1=-42
3371 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* Aligned 128-bit store: movaps xmm/m128, xmm (memory must be 16-byte aligned). */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3411
3412/**
3413 * @opcode 0x29
3414 * @oppfx 66
3415 * @opcpuid sse2
3416 * @opgroup og_sse2_pcksclr_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* Aligned 128-bit store: movapd xmm/m128, xmm (SSE2; memory must be 16-byte aligned). */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3460
3461/* Opcode 0xf3 0x0f 0x29 - invalid */
3462/* Opcode 0xf2 0x0f 0x29 - invalid */
3463
3464
3465/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
{
    /*
     * Convert two packed signed dwords (MMX reg or m64) to two packed
     * singles in the low half of an XMM register; the high half is preserved.
     * Touching the MMX register file puts the FPU into MMX mode.
     */
    IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEM_MC_BEGIN(0, 0);
        /* NOTE(review): cvtpi2ps is an SSE1 instruction; gating on fSse2 here
           looks suspicious - confirm whether fSse was intended. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* NOTE(review): Dst is not pre-loaded with the destination XMM register
           in this branch (unlike the register form above), yet the high
           quadword should be preserved - verify against the helper/caller. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3523
3524
3525/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    /*
     * Convert two packed signed dwords (MMX reg or m64) to two packed
     * doubles, filling the whole destination XMM register. Only the MMX
     * register form switches the FPU into MMX mode; the memory form does not.
     */
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3582
3583
3584/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    /*
     * Convert a signed integer (32-bit, or 64-bit with REX.W) from a GPR or
     * memory to a scalar single in the low dword of an XMM register.
     */
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /* REX.W: 64-bit integer source. */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG(const int64_t *, pi64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /* No REX.W: 32-bit integer source. */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG(const int32_t *, pi32Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3683
3684
3685/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    /*
     * Convert a signed integer (32-bit, or 64-bit with REX.W) from a GPR or
     * memory to a scalar double in the low qword of an XMM register.
     */
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /* REX.W: 64-bit integer source. */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG(const int64_t *, pi64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /* No REX.W: 32-bit integer source. */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG(const int32_t *, pi32Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3784
3785
3786/**
3787 * @opcode 0x2b
3788 * @opcodesub !11 mr/reg
3789 * @oppfx none
3790 * @opcpuid sse
3791 * @opgroup og_sse1_cachect
3792 * @opxcpttype 1
3793 * @optest op1=1 op2=2 -> op1=2
3794 * @optest op1=0 op2=-42 -> op1=-42
3795 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    /*
     * Non-temporal aligned 128-bit store: movntps m128, xmm.
     * The non-temporal hint is not modelled; emulated as an aligned store.
     */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3824
3825/**
3826 * @opcode 0x2b
3827 * @opcodesub !11 mr/reg
3828 * @oppfx 0x66
3829 * @opcpuid sse2
3830 * @opgroup og_sse2_cachect
3831 * @opxcpttype 1
3832 * @optest op1=1 op2=2 -> op1=2
3833 * @optest op1=0 op2=-42 -> op1=-42
3834 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    /*
     * Non-temporal aligned 128-bit store: movntpd m128, xmm (SSE2).
     * The non-temporal hint is not modelled; emulated as an aligned store.
     */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* ALIGN_SSE variant raises #GP on a misaligned effective address. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3863/* Opcode 0xf3 0x0f 0x2b - invalid */
3864/* Opcode 0xf2 0x0f 0x2b - invalid */
3865
3866
3867/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
{
    /*
     * Convert with truncation two packed singles (low qword of xmm/m64) to
     * two packed signed dwords in an MMX register; enters MMX mode.
     */
    IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        /* NOTE(review): cvttps2pi is an SSE1 instruction; gating on fSse2 here
           looks suspicious - confirm whether fSse was intended. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Only the low qword (a_iQword=0) of the XMM source is converted. */
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3922
3923
3924/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    /*
     * Convert with truncation two packed doubles (xmm/m128) to two packed
     * signed dwords in an MMX register; enters MMX mode. The full 128-bit
     * source is needed, hence the XMM-reference/aligned-fetch forms below.
     */
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* m128 operand: the aligned fetch raises #GP on misalignment. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3980
3981
3982/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3983FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3984{
3985 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3986
3987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3988 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3989 {
3990 if (IEM_IS_MODRM_REG_MODE(bRm))
3991 {
3992 /* greg64, XMM */
3993 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3994 IEM_MC_LOCAL(int64_t, i64Dst);
3995 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
3996 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
3997
3998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3999 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4000 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4001
4002 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4003 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4004 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4005 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4006
4007 IEM_MC_ADVANCE_RIP_AND_FINISH();
4008 IEM_MC_END();
4009 }
4010 else
4011 {
4012 /* greg64, [mem64] */
4013 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015 IEM_MC_LOCAL(int64_t, i64Dst);
4016 IEM_MC_LOCAL(uint32_t, u32Src);
4017 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4018 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4019
4020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4022 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4023 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4024
4025 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4026 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4027 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4028 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4029
4030 IEM_MC_ADVANCE_RIP_AND_FINISH();
4031 IEM_MC_END();
4032 }
4033 }
4034 else
4035 {
4036 if (IEM_IS_MODRM_REG_MODE(bRm))
4037 {
4038 /* greg, XMM */
4039 IEM_MC_BEGIN(0, 0);
4040 IEM_MC_LOCAL(int32_t, i32Dst);
4041 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4042 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4043
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4045 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4046 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4047
4048 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4049 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4050 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4051 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4052
4053 IEM_MC_ADVANCE_RIP_AND_FINISH();
4054 IEM_MC_END();
4055 }
4056 else
4057 {
4058 /* greg, [mem] */
4059 IEM_MC_BEGIN(0, 0);
4060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4061 IEM_MC_LOCAL(int32_t, i32Dst);
4062 IEM_MC_LOCAL(uint32_t, u32Src);
4063 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4064 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4065
4066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4069 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4070
4071 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4072 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4073 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4074 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4075
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 IEM_MC_END();
4078 }
4079 }
4080}
4081
4082
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    /* Converts (with truncation) a scalar double-precision value from an XMM
       register or 64-bit memory operand into a signed 32/64-bit GPR; the
       destination width is selected by REX.W. */
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] - source is always a 64-bit double, only the destination narrows. */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4182
4183
/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
{
    /* Converts two packed singles (low qword of an XMM register, or a 64-bit
       memory operand) to two signed dwords in an MMX register, using the
       current MXCSR rounding mode.
       NOTE(review): this decodes with fSse2, but CVTPS2PI is an SSE1
       instruction; fSse looks intended (cf. cvttss2si) - confirm. */
    IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* destination is an MMX register, so enter MMX mode */

        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 64-bit fetch, no 16-byte alignment check */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4240
4241
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    /* Converts two packed doubles from an XMM register or an alignment-checked
       128-bit memory operand to two signed dwords in an MMX register, using
       the current MXCSR rounding mode (non-truncating variant of 0x2c). */
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* destination is an MMX register, so enter MMX mode */

        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit fetch */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4299
4300
4301/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4302FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4303{
4304 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4305
4306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4307 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4308 {
4309 if (IEM_IS_MODRM_REG_MODE(bRm))
4310 {
4311 /* greg64, XMM */
4312 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4313 IEM_MC_LOCAL(int64_t, i64Dst);
4314 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4315 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4316
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4320
4321 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4322 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4323 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4324 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4325
4326 IEM_MC_ADVANCE_RIP_AND_FINISH();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /* greg64, [mem64] */
4332 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4334 IEM_MC_LOCAL(int64_t, i64Dst);
4335 IEM_MC_LOCAL(uint32_t, u32Src);
4336 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4337 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4338
4339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4342 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4343
4344 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4345 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4346 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4347 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4348
4349 IEM_MC_ADVANCE_RIP_AND_FINISH();
4350 IEM_MC_END();
4351 }
4352 }
4353 else
4354 {
4355 if (IEM_IS_MODRM_REG_MODE(bRm))
4356 {
4357 /* greg, XMM */
4358 IEM_MC_BEGIN(0, 0);
4359 IEM_MC_LOCAL(int32_t, i32Dst);
4360 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4361 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4362
4363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4364 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4365 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4366
4367 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4368 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4369 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4370 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4371
4372 IEM_MC_ADVANCE_RIP_AND_FINISH();
4373 IEM_MC_END();
4374 }
4375 else
4376 {
4377 /* greg, [mem] */
4378 IEM_MC_BEGIN(0, 0);
4379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4380 IEM_MC_LOCAL(int32_t, i32Dst);
4381 IEM_MC_LOCAL(uint32_t, u32Src);
4382 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4383 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4384
4385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4388 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4389
4390 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4391 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4392 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4393 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4394
4395 IEM_MC_ADVANCE_RIP_AND_FINISH();
4396 IEM_MC_END();
4397 }
4398 }
4399}
4400
4401
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    /* Converts a scalar double-precision value from an XMM register or 64-bit
       memory operand into a signed 32/64-bit GPR using the current MXCSR
       rounding mode; the destination width is selected by REX.W. */
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG(const uint64_t *, pu64Src, 1);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4501
4502
4503/**
4504 * @opcode 0x2e
4505 * @oppfx none
4506 * @opflmodify cf,pf,af,zf,sf,of
4507 * @opflclear af,sf,of
4508 */
4509FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4510{
4511 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4513 if (IEM_IS_MODRM_REG_MODE(bRm))
4514 {
4515 /*
4516 * Register, register.
4517 */
4518 IEM_MC_BEGIN(0, 0);
4519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4520 IEM_MC_LOCAL(uint32_t, fEFlags);
4521 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4522 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4523 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4525 IEM_MC_PREPARE_SSE_USAGE();
4526 IEM_MC_FETCH_EFLAGS(fEFlags);
4527 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4528 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4529 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4530 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4531 IEM_MC_COMMIT_EFLAGS(fEFlags);
4532
4533 IEM_MC_ADVANCE_RIP_AND_FINISH();
4534 IEM_MC_END();
4535 }
4536 else
4537 {
4538 /*
4539 * Register, memory.
4540 */
4541 IEM_MC_BEGIN(0, 0);
4542 IEM_MC_LOCAL(uint32_t, fEFlags);
4543 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4544 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4545 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4547
4548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4551 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4552
4553 IEM_MC_PREPARE_SSE_USAGE();
4554 IEM_MC_FETCH_EFLAGS(fEFlags);
4555 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4556 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4557 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4558 IEM_MC_COMMIT_EFLAGS(fEFlags);
4559
4560 IEM_MC_ADVANCE_RIP_AND_FINISH();
4561 IEM_MC_END();
4562 }
4563}
4564
4565
4566/**
4567 * @opcode 0x2e
4568 * @oppfx 0x66
4569 * @opflmodify cf,pf,af,zf,sf,of
4570 * @opflclear af,sf,of
4571 */
4572FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4573{
4574 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576 if (IEM_IS_MODRM_REG_MODE(bRm))
4577 {
4578 /*
4579 * Register, register.
4580 */
4581 IEM_MC_BEGIN(0, 0);
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4583 IEM_MC_LOCAL(uint32_t, fEFlags);
4584 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4585 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4586 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4588 IEM_MC_PREPARE_SSE_USAGE();
4589 IEM_MC_FETCH_EFLAGS(fEFlags);
4590 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4591 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4592 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4594 IEM_MC_COMMIT_EFLAGS(fEFlags);
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /*
4602 * Register, memory.
4603 */
4604 IEM_MC_BEGIN(0, 0);
4605 IEM_MC_LOCAL(uint32_t, fEFlags);
4606 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4607 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4608 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4610
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4614 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4615
4616 IEM_MC_PREPARE_SSE_USAGE();
4617 IEM_MC_FETCH_EFLAGS(fEFlags);
4618 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4619 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4620 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4621 IEM_MC_COMMIT_EFLAGS(fEFlags);
4622
4623 IEM_MC_ADVANCE_RIP_AND_FINISH();
4624 IEM_MC_END();
4625 }
4626}
4627
4628
4629/* Opcode 0xf3 0x0f 0x2e - invalid */
4630/* Opcode 0xf2 0x0f 0x2e - invalid */
4631
4632
4633/**
4634 * @opcode 0x2e
4635 * @oppfx none
4636 * @opflmodify cf,pf,af,zf,sf,of
4637 * @opflclear af,sf,of
4638 */
4639FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4640{
4641 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4643 if (IEM_IS_MODRM_REG_MODE(bRm))
4644 {
4645 /*
4646 * Register, register.
4647 */
4648 IEM_MC_BEGIN(0, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4650 IEM_MC_LOCAL(uint32_t, fEFlags);
4651 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4652 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4653 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4655 IEM_MC_PREPARE_SSE_USAGE();
4656 IEM_MC_FETCH_EFLAGS(fEFlags);
4657 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4658 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4659 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4660 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4661 IEM_MC_COMMIT_EFLAGS(fEFlags);
4662
4663 IEM_MC_ADVANCE_RIP_AND_FINISH();
4664 IEM_MC_END();
4665 }
4666 else
4667 {
4668 /*
4669 * Register, memory.
4670 */
4671 IEM_MC_BEGIN(0, 0);
4672 IEM_MC_LOCAL(uint32_t, fEFlags);
4673 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4674 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4675 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4677
4678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4681 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4682
4683 IEM_MC_PREPARE_SSE_USAGE();
4684 IEM_MC_FETCH_EFLAGS(fEFlags);
4685 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4686 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4687 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4688 IEM_MC_COMMIT_EFLAGS(fEFlags);
4689
4690 IEM_MC_ADVANCE_RIP_AND_FINISH();
4691 IEM_MC_END();
4692 }
4693}
4694
4695
4696/**
4697 * @opcode 0x2f
4698 * @oppfx 0x66
4699 * @opflmodify cf,pf,af,zf,sf,of
4700 * @opflclear af,sf,of
4701 */
4702FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4703{
4704 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4706 if (IEM_IS_MODRM_REG_MODE(bRm))
4707 {
4708 /*
4709 * Register, register.
4710 */
4711 IEM_MC_BEGIN(0, 0);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4713 IEM_MC_LOCAL(uint32_t, fEFlags);
4714 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4715 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4716 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4717 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4718 IEM_MC_PREPARE_SSE_USAGE();
4719 IEM_MC_FETCH_EFLAGS(fEFlags);
4720 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4721 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4722 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4723 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4724 IEM_MC_COMMIT_EFLAGS(fEFlags);
4725
4726 IEM_MC_ADVANCE_RIP_AND_FINISH();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /*
4732 * Register, memory.
4733 */
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_LOCAL(uint32_t, fEFlags);
4736 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4737 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4738 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4740
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4743 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4744 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4745
4746 IEM_MC_PREPARE_SSE_USAGE();
4747 IEM_MC_FETCH_EFLAGS(fEFlags);
4748 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4749 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4750 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4751 IEM_MC_COMMIT_EFLAGS(fEFlags);
4752
4753 IEM_MC_ADVANCE_RIP_AND_FINISH();
4754 IEM_MC_END();
4755 }
4756}
4757
4758
4759/* Opcode 0xf3 0x0f 0x2f - invalid */
4760/* Opcode 0xf2 0x0f 0x2f - invalid */
4761
/** Opcode 0x0f 0x30 - wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* Fully deferred to the C implementation; may cause a VM exit. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
}
4769
4770
/** Opcode 0x0f 0x31 - rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* Deferred to the C implementation; declares EAX/EDX as modified so the
       native recompiler can flush/invalidate those shadow registers. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdtsc);
}
4781
4782
/** Opcode 0x0f 0x32 - rdmsr.  (Comment previously said 0x33, which is rdpmc.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Deferred to the C implementation; declares EAX/EDX as modified so the
       native recompiler can flush/invalidate those shadow registers. */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdmsr);
}
4793
4794
/** Opcode 0x0f 0x33 - rdpmc.  (Comment previously said 0x34, which is sysenter.) */
FNIEMOP_DEF(iemOp_rdpmc)
{
    /* Deferred to the C implementation; declares EAX/EDX as modified so the
       native recompiler can flush/invalidate those shadow registers. */
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_rdpmc);
}
4805
4806
/** Opcode 0x0f 0x34 - sysenter. */
FNIEMOP_DEF(iemOp_sysenter)
{
    /* Far, unconditional control transfer - deferred to the C implementation
       and flagged so the translation block is ended. */
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_sysenter);
}
4816
/** Opcode 0x0f 0x35 - sysexit. */
FNIEMOP_DEF(iemOp_sysexit)
{
    /* Far, unconditional control transfer - deferred to the C implementation,
       which needs the effective operand size (sysexit behaves differently
       with a 64-bit operand size); ends the translation block. */
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
4826
/** Opcode 0x0f 0x37 - getsec (stubbed, not implemented). */
FNIEMOP_STUB(iemOp_getsec);
4829
4830
/** Opcode 0x0f 0x38 - escape to the three-byte 0x0f 0x38 opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* The table holds four entries per opcode byte, selected by the decoded
       prefix index (none/0x66/0xf3/0xf2). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
4842
4843
/** Opcode 0x0f 0x3a - escape to the three-byte 0x0f 0x3a opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* The table holds four entries per opcode byte, selected by the decoded
       prefix index (none/0x66/0xf3/0xf2). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
4855
4856
4857/**
4858 * Implements a conditional move.
4859 *
4860 * Wish there was an obvious way to do this where we could share and reduce
4861 * code bloat.
4862 *
4863 * @param a_Cnd The conditional "microcode" operation.
4864 */
4865#define CMOV_X(a_Cnd) \
4866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4867 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4868 { \
4869 switch (pVCpu->iem.s.enmEffOpSize) \
4870 { \
4871 case IEMMODE_16BIT: \
4872 IEM_MC_BEGIN(0, 0); \
4873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4874 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4875 a_Cnd { \
4876 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4877 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4878 } IEM_MC_ENDIF(); \
4879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4880 IEM_MC_END(); \
4881 break; \
4882 \
4883 case IEMMODE_32BIT: \
4884 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4886 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4887 a_Cnd { \
4888 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4889 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4890 } IEM_MC_ELSE() { \
4891 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4892 } IEM_MC_ENDIF(); \
4893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4894 IEM_MC_END(); \
4895 break; \
4896 \
4897 case IEMMODE_64BIT: \
4898 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4900 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4901 a_Cnd { \
4902 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4903 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4904 } IEM_MC_ENDIF(); \
4905 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4906 IEM_MC_END(); \
4907 break; \
4908 \
4909 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4910 } \
4911 } \
4912 else \
4913 { \
4914 switch (pVCpu->iem.s.enmEffOpSize) \
4915 { \
4916 case IEMMODE_16BIT: \
4917 IEM_MC_BEGIN(0, 0); \
4918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4919 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4922 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4923 a_Cnd { \
4924 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4925 } IEM_MC_ENDIF(); \
4926 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4927 IEM_MC_END(); \
4928 break; \
4929 \
4930 case IEMMODE_32BIT: \
4931 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4933 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4936 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4937 a_Cnd { \
4938 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4939 } IEM_MC_ELSE() { \
4940 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4941 } IEM_MC_ENDIF(); \
4942 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4943 IEM_MC_END(); \
4944 break; \
4945 \
4946 case IEMMODE_64BIT: \
4947 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4949 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4952 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4953 a_Cnd { \
4954 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4955 } IEM_MC_ENDIF(); \
4956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4957 IEM_MC_END(); \
4958 break; \
4959 \
4960 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4961 } \
4962 } do {} while (0)
4963
4964
4965
4966/**
4967 * @opcode 0x40
4968 * @opfltest of
4969 */
4970FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4971{
4972 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4973 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4974}
4975
4976
4977/**
4978 * @opcode 0x41
4979 * @opfltest of
4980 */
4981FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4982{
4983 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4984 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4985}
4986
4987
4988/**
4989 * @opcode 0x42
4990 * @opfltest cf
4991 */
4992FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4993{
4994 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4995 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4996}
4997
4998
4999/**
5000 * @opcode 0x43
5001 * @opfltest cf
5002 */
5003FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5004{
5005 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5006 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5007}
5008
5009
5010/**
5011 * @opcode 0x44
5012 * @opfltest zf
5013 */
5014FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5015{
5016 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5017 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5018}
5019
5020
5021/**
5022 * @opcode 0x45
5023 * @opfltest zf
5024 */
5025FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5026{
5027 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5028 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5029}
5030
5031
5032/**
5033 * @opcode 0x46
5034 * @opfltest cf,zf
5035 */
5036FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5037{
5038 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5039 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5040}
5041
5042
5043/**
5044 * @opcode 0x47
5045 * @opfltest cf,zf
5046 */
5047FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5048{
5049 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5050 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5051}
5052
5053
5054/**
5055 * @opcode 0x48
5056 * @opfltest sf
5057 */
5058FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5059{
5060 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5061 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5062}
5063
5064
5065/**
5066 * @opcode 0x49
5067 * @opfltest sf
5068 */
5069FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5070{
5071 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5072 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5073}
5074
5075
5076/**
5077 * @opcode 0x4a
5078 * @opfltest pf
5079 */
5080FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5081{
5082 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5083 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5084}
5085
5086
5087/**
5088 * @opcode 0x4b
5089 * @opfltest pf
5090 */
5091FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5092{
5093 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5094 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5095}
5096
5097
5098/**
5099 * @opcode 0x4c
5100 * @opfltest sf,of
5101 */
5102FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5103{
5104 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5105 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5106}
5107
5108
5109/**
5110 * @opcode 0x4d
5111 * @opfltest sf,of
5112 */
5113FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5114{
5115 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5116 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5117}
5118
5119
5120/**
5121 * @opcode 0x4e
5122 * @opfltest zf,sf,of
5123 */
5124FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5125{
5126 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5127 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5128}
5129
5130
5131/**
5132 * @opcode 0x4e
5133 * @opfltest zf,sf,of
5134 */
5135FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5136{
5137 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5138 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5139}
5140
5141#undef CMOV_X
5142
/** Opcode 0x0f 0x50 - movmskps Gy, Ups
 * Extracts the sign bits of the four packed singles in the source XMM
 * register into the destination general register. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        /* The 8-bit worker result is written via a 32-bit greg store. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
5170
5171
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd
 * Extracts the sign bits of the two packed doubles in the source XMM
 * register into the destination general register. */
FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
        /* The 8-bit worker result is written via a 32-bit greg store. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();

}
5200
5201
5202/* Opcode 0xf3 0x0f 0x50 - invalid */
5203/* Opcode 0xf2 0x0f 0x50 - invalid */
5204
5205
/*
 * Opcodes 0x0f 0x51..0x53: square root, reciprocal square-root estimate and
 * reciprocal estimate.  Each prefix variant dispatches to a common SSE/SSE2
 * floating-point decode helper with the matching assembly worker.
 */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}


/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x52 - invalid */


/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
}


/* Opcode 0x66 0x0f 0x53 - invalid */


/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x53 - invalid */
5280
5281
/*
 * Opcodes 0x0f 0x54..0x57: packed single/double bitwise logic.  These reuse
 * the integer pand/pandn/por/pxor assembly workers since the bitwise
 * operations are type agnostic.
 */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */
5360
/*
 * Opcodes 0x0f 0x58..0x59: packed/scalar floating-point add and multiply.
 * All variants defer to the common SSE/SSE2 floating-point decode helpers
 * with the matching assembly worker.
 */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5423
5424
/*
 * Opcodes 0x0f 0x5a..0x5b: floating-point / integer conversions, dispatched
 * to the common SSE/SSE2 floating-point decode helpers.
 */

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}


/* Opcode 0xf2 0x0f 0x5b - invalid */
5482
5483
/*
 * Opcodes 0x0f 0x5c..0x5d: packed/scalar floating-point subtract and minimum.
 */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}


/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5546
5547
/*
 * Opcodes 0x0f 0x5e..0x5f: packed/scalar floating-point divide and maximum.
 */

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}


/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5610
5611
/*
 * Opcodes 0x0f 0x60..0x63: low-half unpack interleaves and signed saturating
 * pack, in MMX (no prefix) and SSE2 (0x66 prefix) variants.
 */

/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}


/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */
5688
5689
/*
 * Opcodes 0x0f 0x64..0x67: packed signed greater-than compares and unsigned
 * saturating pack, in MMX (no prefix) and SSE2 (0x66 prefix) variants.
 */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}


/* Opcode 0xf3 0x0f 0x66 - invalid */


/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}


/* Opcode 0xf3 0x0f 0x67 - invalid */
5764
5765
/*
 * Opcodes 0x0f 0x68..0x6b: high-half unpack interleaves and signed saturating
 * dword-to-word pack, in MMX (no prefix) and SSE2 (0x66 prefix) variants.
 */

/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}


/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}


/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}


/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}


/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}


/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}


/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}


/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}


/* Opcode 0xf3 0x0f 0x6b - invalid */
5850
5851
/*
 * Opcodes 0x0f 0x6c..0x6d: quadword unpack interleaves.  SSE2 only; there is
 * no MMX variant, hence the unprefixed forms are invalid.
 */

/* Opcode 0x0f 0x6c - invalid */


/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */


/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6d - invalid */
5880
/** Opcode 0x0f 0x6e - movd/movq Pq, Ey: load an MMX register from a GPR or
 *  memory; REX.W selects the 64-bit (movq) form, otherwise the 32-bit value
 *  is zero-extended into the destination (movd). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64: full 64-bit GPR -> MMX register transfer. */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64]: effective address is decoded before the
               done-decoding check, matching the other memory forms. */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            /* Only switch to MMX mode after the fetch may have faulted. */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32: 32-bit value is zero-extended into the 64-bit MMX reg. */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            /* Only switch to MMX mode after the fetch may have faulted. */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
5987
/** Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey: load an XMM register from a GPR
 *  or memory, zero-extending to 128 bits; REX.W selects the 64-bit (movq)
 *  form, otherwise only 32 bits are read (movd). */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64: 64-bit GPR -> low qword of XMM, upper qword zeroed. */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32: 32-bit GPR -> low dword of XMM, rest zeroed. */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6090
6091/* Opcode 0xf3 0x0f 0x6e - invalid */
6092
6093
6094/**
6095 * @opcode 0x6f
6096 * @oppfx none
6097 * @opcpuid mmx
6098 * @opgroup og_mmx_datamove
6099 * @opxcpttype 5
6100 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6101 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6102 */
6103FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6104{
6105 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6107 if (IEM_IS_MODRM_REG_MODE(bRm))
6108 {
6109 /*
6110 * Register, register.
6111 */
6112 IEM_MC_BEGIN(0, 0);
6113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6114 IEM_MC_LOCAL(uint64_t, u64Tmp);
6115
6116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6117 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6118 IEM_MC_FPU_TO_MMX_MODE();
6119
6120 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6121 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6122
6123 IEM_MC_ADVANCE_RIP_AND_FINISH();
6124 IEM_MC_END();
6125 }
6126 else
6127 {
6128 /*
6129 * Register, memory.
6130 */
6131 IEM_MC_BEGIN(0, 0);
6132 IEM_MC_LOCAL(uint64_t, u64Tmp);
6133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6134
6135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6137 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6138 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6139
6140 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6141 IEM_MC_FPU_TO_MMX_MODE();
6142
6143 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6144
6145 IEM_MC_ADVANCE_RIP_AND_FINISH();
6146 IEM_MC_END();
6147 }
6148}
6149
6150/**
6151 * @opcode 0x6f
6152 * @oppfx 0x66
6153 * @opcpuid sse2
6154 * @opgroup og_sse2_simdint_datamove
6155 * @opxcpttype 1
6156 * @optest op1=1 op2=2 -> op1=2
6157 * @optest op1=0 op2=-42 -> op1=-42
6158 */
/** Opcode 0x66 0x0f 0x6f - movdqa Vdq, Wdq (aligned 128-bit load/copy). */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain XMM-to-XMM copy.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: alignment-checked fetch (the _ALIGN_SSE fetch
         * enforces the 16-byte alignment requirement of movdqa).
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6200
6201/**
6202 * @opcode 0x6f
6203 * @oppfx 0xf3
6204 * @opcpuid sse2
6205 * @opgroup og_sse2_simdint_datamove
6206 * @opxcpttype 4UA
6207 * @optest op1=1 op2=2 -> op1=2
6208 * @optest op1=0 op2=-42 -> op1=-42
6209 */
/** Opcode 0xf3 0x0f 0x6f - movdqu Vdq, Wdq (unaligned 128-bit load/copy). */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain XMM-to-XMM copy.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: unlike movdqa this uses the no-alignment-check
         * fetch, matching movdqu semantics.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6248
6249
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib (shuffle words in MMX reg per imm8 control). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Third argument is 1 to account for the imm8 that follows the operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,               bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6307
6308
6309/**
6310 * Common worker for SSE2 instructions on the forms:
6311 * pshufd xmm1, xmm2/mem128, imm8
6312 * pshufhw xmm1, xmm2/mem128, imm8
6313 * pshuflw xmm1, xmm2/mem128, imm8
6314 *
6315 * Proper alignment of the 128-bit operand is enforced.
6316 * Exceptions type 4. SSE2 cpuid checks.
6317 */
6318FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6319{
6320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6321 if (IEM_IS_MODRM_REG_MODE(bRm))
6322 {
6323 /*
6324 * Register, register.
6325 */
6326 IEM_MC_BEGIN(0, 0);
6327 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6329 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6330 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6331 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6333 IEM_MC_PREPARE_SSE_USAGE();
6334 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6335 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6336 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6337 IEM_MC_ADVANCE_RIP_AND_FINISH();
6338 IEM_MC_END();
6339 }
6340 else
6341 {
6342 /*
6343 * Register, memory.
6344 */
6345 IEM_MC_BEGIN(0, 0);
6346 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6347 IEM_MC_LOCAL(RTUINT128U, uSrc);
6348 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6350
6351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6353 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6356
6357 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6358 IEM_MC_PREPARE_SSE_USAGE();
6359 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6360 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6361
6362 IEM_MC_ADVANCE_RIP_AND_FINISH();
6363 IEM_MC_END();
6364 }
6365}
6366
6367
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib (shuffle dwords per imm8 control). */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6374
6375
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib (shuffle high words per imm8 control). */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6382
6383
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib (shuffle low words per imm8 control). */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6390
6391
6392/**
6393 * Common worker for MMX instructions of the form:
6394 * psrlw mm, imm8
6395 * psraw mm, imm8
6396 * psllw mm, imm8
6397 * psrld mm, imm8
6398 * psrad mm, imm8
6399 * pslld mm, imm8
6400 * psrlq mm, imm8
6401 * psllq mm, imm8
6402 *
6403 */
6404FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6405{
6406 if (IEM_IS_MODRM_REG_MODE(bRm))
6407 {
6408 /*
6409 * Register, immediate.
6410 */
6411 IEM_MC_BEGIN(0, 0);
6412 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6414 IEM_MC_ARG(uint64_t *, pDst, 0);
6415 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6416 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6417 IEM_MC_PREPARE_FPU_USAGE();
6418 IEM_MC_FPU_TO_MMX_MODE();
6419
6420 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6421 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6422 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6423
6424 IEM_MC_ADVANCE_RIP_AND_FINISH();
6425 IEM_MC_END();
6426 }
6427 else
6428 {
6429 /*
6430 * Register, memory not supported.
6431 */
6432 /// @todo Caller already enforced register mode?!
6433 AssertFailedReturn(VINF_SUCCESS);
6434 }
6435}
6436
6437
6438/**
6439 * Common worker for SSE2 instructions of the form:
6440 * psrlw xmm, imm8
6441 * psraw xmm, imm8
6442 * psllw xmm, imm8
6443 * psrld xmm, imm8
6444 * psrad xmm, imm8
6445 * pslld xmm, imm8
6446 * psrlq xmm, imm8
6447 * psllq xmm, imm8
6448 *
6449 */
6450FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6451{
6452 if (IEM_IS_MODRM_REG_MODE(bRm))
6453 {
6454 /*
6455 * Register, immediate.
6456 */
6457 IEM_MC_BEGIN(0, 0);
6458 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6460 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6461 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6462 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6463 IEM_MC_PREPARE_SSE_USAGE();
6464 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6465 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6466 IEM_MC_ADVANCE_RIP_AND_FINISH();
6467 IEM_MC_END();
6468 }
6469 else
6470 {
6471 /*
6472 * Register, memory.
6473 */
6474 /// @todo Caller already enforced register mode?!
6475 AssertFailedReturn(VINF_SUCCESS);
6476 }
6477}
6478
6479
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib (MMX logical right shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib (SSE2 logical right shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}


/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib (MMX arithmetic right shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib (SSE2 arithmetic right shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}


/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib (MMX left shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib (SSE2 left shift of words). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6526
6527
6528/**
6529 * Group 12 jump table for register variant.
6530 */
6531IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6532{
6533 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6534 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6535 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6536 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6537 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6538 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6539 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6540 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6541};
6542AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6543
6544
/** Opcode 0x0f 0x71 - Group 12: word shifts by immediate (psrlw/psraw/psllw). */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid for this group. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6555
6556
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib (MMX logical right shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib (SSE2 logical right shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib (MMX arithmetic right shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib (SSE2 arithmetic right shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib (MMX left shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib (SSE2 left shift of dwords). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6602
6603
6604/**
6605 * Group 13 jump table for register variant.
6606 */
6607IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6608{
6609 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6610 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6611 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6612 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6613 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6614 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6615 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6616 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6617};
6618AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6619
/** Opcode 0x0f 0x72 - Group 13: dword shifts by immediate (psrld/psrad/pslld). */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid for this group. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6630
6631
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib (MMX logical right shift of the qword). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib (SSE2 logical right shift of qwords). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib (SSE2 byte shift right of the whole xmm; no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib (MMX left shift of the qword). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib (SSE2 left shift of qwords). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib (SSE2 byte shift left of the whole xmm; no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6678
6679/**
6680 * Group 14 jump table for register variant.
6681 */
6682IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6683{
6684 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6685 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6686 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6687 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6688 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6689 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6691 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6692};
6693AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6694
6695
/** Opcode 0x0f 0x73 - Group 14: qword/dqword shifts by immediate (psrlq/psrldq/psllq/pslldq). */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid for this group. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6706
6707
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq (byte equality compare, MMX). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx (byte equality compare, SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq (word equality compare, MMX). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx (word equality compare, SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq (dword equality compare, MMX). */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx (dword equality compare, SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
6762
6763
6764/* Opcode 0xf3 0x0f 0x76 - invalid */
6765/* Opcode 0xf2 0x0f 0x76 - invalid */
6766
6767
/** Opcode 0x0f 0x77 - emms: leave MMX mode, marking the x87 tag word empty
 *  (vex encoding has vzeroall and vzeroupper here). */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Raises #NM on CR0.TS/EM and #MF on a pending x87 exception before touching state. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6781
6782/* Opcode 0x66 0x0f 0x77 - invalid */
6783/* Opcode 0xf3 0x0f 0x77 - invalid */
6784/* Opcode 0xf2 0x0f 0x77 - invalid */
6785
6786/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6787#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** VMREAD Ey, Gy: read a VMCS field (encoding in Gy) into a register or
 *  memory destination.  Raises #UD outside VMX operation (helper macros). */
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is fixed by mode: 64-bit in long mode, else 32-bit. */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t,   u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t,   u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
6861#else
6862FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
6863#endif
6864
6865/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6866FNIEMOP_STUB(iemOp_AmdGrp17);
6867/* Opcode 0xf3 0x0f 0x78 - invalid */
6868/* Opcode 0xf2 0x0f 0x78 - invalid */
6869
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/**
 * VMWRITE - writes the value given by the r/m operand (register or memory)
 * into the VMCS field selected by the encoding held in the 'reg' operand.
 *
 * Decoding only; the real work is done by the iemCImpl_vmwrite_reg /
 * iemCImpl_vmwrite_mem C implementations, which may cause a VM-exit and
 * update the VMX status flags (hence IEM_CIMPL_F_VMEXIT|STATUS_FLAGS).
 */
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is dictated by the CPU mode: 64-bit in long mode, else 32-bit
       (the operand-size prefix is rejected below, not used for sizing). */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));   /* value to write */
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* VMCS field encoding */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));   /* value to write */
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* VMCS field encoding */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* VMCS field encoding */
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            /* Note: args are declared in a different order than the 64-bit path
               above; the explicit argument indices keep the call identical. */
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* VMCS field encoding */
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
#endif
6944/* Opcode 0x66 0x0f 0x79 - invalid */
6945/* Opcode 0xf3 0x0f 0x79 - invalid */
6946/* Opcode 0xf2 0x0f 0x79 - invalid */
6947
6948/* Opcode 0x0f 0x7a - invalid */
6949/* Opcode 0x66 0x0f 0x7a - invalid */
6950/* Opcode 0xf3 0x0f 0x7a - invalid */
6951/* Opcode 0xf2 0x0f 0x7a - invalid */
6952
6953/* Opcode 0x0f 0x7b - invalid */
6954/* Opcode 0x66 0x0f 0x7b - invalid */
6955/* Opcode 0xf3 0x0f 0x7b - invalid */
6956/* Opcode 0xf2 0x0f 0x7b - invalid */
6957
6958/* Opcode 0x0f 0x7c - invalid */
6959
6960
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    /* SSE3 horizontal add, packed double - defers to the common SSE3 full/full worker. */
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
6967
6968
6969/* Opcode 0xf3 0x0f 0x7c - invalid */
6970
6971
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    /* SSE3 horizontal add, packed single - defers to the common SSE3 full/full worker. */
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
6978
6979
6980/* Opcode 0x0f 0x7d - invalid */
6981
6982
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    /* SSE3 horizontal subtract, packed double - defers to the common SSE3 full/full worker. */
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
6989
6990
6991/* Opcode 0xf3 0x0f 0x7d - invalid */
6992
6993
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    /* SSE3 horizontal subtract, packed single - defers to the common SSE3 full/full worker. */
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7000
7001
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 *
 * MOVD/MOVQ store from an MMX register to a general register or memory.
 * REX.W selects between the 64-bit (MOVQ) and 32-bit (MOVD) forms.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();   /* executing an MMX insn switches the FPU to MMX mode */

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();   /* only after the store can no longer fault */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();   /* executing an MMX insn switches the FPU to MMX mode */

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();   /* only after the store can no longer fault */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7109
7110
/**
 * Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy.
 *
 * SSE2 store of the low dword/qword of an XMM register to a general
 * register or memory; REX.W selects MOVQ (64-bit) vs MOVD (32-bit).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7213
7214/**
7215 * @opcode 0x7e
7216 * @oppfx 0xf3
7217 * @opcpuid sse2
7218 * @opgroup og_sse2_pcksclr_datamove
7219 * @opxcpttype none
7220 * @optest op1=1 op2=2 -> op1=2
7221 * @optest op1=0 op2=-42 -> op1=-42
7222 */
7223FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7224{
7225 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7227 if (IEM_IS_MODRM_REG_MODE(bRm))
7228 {
7229 /*
7230 * XMM128, XMM64.
7231 */
7232 IEM_MC_BEGIN(0, 0);
7233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7234 IEM_MC_LOCAL(uint64_t, uSrc);
7235
7236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7238
7239 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7240 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7241
7242 IEM_MC_ADVANCE_RIP_AND_FINISH();
7243 IEM_MC_END();
7244 }
7245 else
7246 {
7247 /*
7248 * XMM128, [mem64].
7249 */
7250 IEM_MC_BEGIN(0, 0);
7251 IEM_MC_LOCAL(uint64_t, uSrc);
7252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7253
7254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7258
7259 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7260 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7261
7262 IEM_MC_ADVANCE_RIP_AND_FINISH();
7263 IEM_MC_END();
7264 }
7265}
7266
7267/* Opcode 0xf2 0x0f 0x7e - invalid */
7268
7269
/** Opcode 0x0f 0x7f - movq Qq, Pq
 *
 * MMX store: copies an MMX register to another MMX register or to memory.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* executing an MMX insn switches the FPU to MMX mode */

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();   /* only after the store can no longer fault */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7317
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 *
 * Aligned 128-bit SSE2 store; the memory form enforces 16-byte alignment.
 */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);  /* alignment-checked store */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7358
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 *
 * Unaligned 128-bit SSE2 store; like movdqa above but without the
 * 16-byte alignment check on the memory form.
 */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);  /* no alignment check */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7399
7400/* Opcode 0xf2 0x0f 0x7f - invalid */
7401
7402
7403/**
7404 * @opcode 0x80
7405 * @opfltest of
7406 */
7407FNIEMOP_DEF(iemOp_jo_Jv)
7408{
7409 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7410 IEMOP_HLP_MIN_386();
7411 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7412 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7413 {
7414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7415 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7417 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7418 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7419 } IEM_MC_ELSE() {
7420 IEM_MC_ADVANCE_RIP_AND_FINISH();
7421 } IEM_MC_ENDIF();
7422 IEM_MC_END();
7423 }
7424 else
7425 {
7426 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7427 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7430 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7431 } IEM_MC_ELSE() {
7432 IEM_MC_ADVANCE_RIP_AND_FINISH();
7433 } IEM_MC_ENDIF();
7434 IEM_MC_END();
7435 }
7436}
7437
7438
7439/**
7440 * @opcode 0x81
7441 * @opfltest of
7442 */
7443FNIEMOP_DEF(iemOp_jno_Jv)
7444{
7445 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7446 IEMOP_HLP_MIN_386();
7447 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7448 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7449 {
7450 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7451 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7453 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7454 IEM_MC_ADVANCE_RIP_AND_FINISH();
7455 } IEM_MC_ELSE() {
7456 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7457 } IEM_MC_ENDIF();
7458 IEM_MC_END();
7459 }
7460 else
7461 {
7462 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7463 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7466 IEM_MC_ADVANCE_RIP_AND_FINISH();
7467 } IEM_MC_ELSE() {
7468 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7469 } IEM_MC_ENDIF();
7470 IEM_MC_END();
7471 }
7472}
7473
7474
7475/**
7476 * @opcode 0x82
7477 * @opfltest cf
7478 */
7479FNIEMOP_DEF(iemOp_jc_Jv)
7480{
7481 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7482 IEMOP_HLP_MIN_386();
7483 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7484 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7485 {
7486 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7487 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7489 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7490 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7491 } IEM_MC_ELSE() {
7492 IEM_MC_ADVANCE_RIP_AND_FINISH();
7493 } IEM_MC_ENDIF();
7494 IEM_MC_END();
7495 }
7496 else
7497 {
7498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7499 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7502 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7503 } IEM_MC_ELSE() {
7504 IEM_MC_ADVANCE_RIP_AND_FINISH();
7505 } IEM_MC_ENDIF();
7506 IEM_MC_END();
7507 }
7508}
7509
7510
7511/**
7512 * @opcode 0x83
7513 * @opfltest cf
7514 */
7515FNIEMOP_DEF(iemOp_jnc_Jv)
7516{
7517 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7518 IEMOP_HLP_MIN_386();
7519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7521 {
7522 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7523 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7526 IEM_MC_ADVANCE_RIP_AND_FINISH();
7527 } IEM_MC_ELSE() {
7528 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7529 } IEM_MC_ENDIF();
7530 IEM_MC_END();
7531 }
7532 else
7533 {
7534 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7538 IEM_MC_ADVANCE_RIP_AND_FINISH();
7539 } IEM_MC_ELSE() {
7540 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7541 } IEM_MC_ENDIF();
7542 IEM_MC_END();
7543 }
7544}
7545
7546
7547/**
7548 * @opcode 0x84
7549 * @opfltest zf
7550 */
7551FNIEMOP_DEF(iemOp_je_Jv)
7552{
7553 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7554 IEMOP_HLP_MIN_386();
7555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7557 {
7558 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7559 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7562 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7563 } IEM_MC_ELSE() {
7564 IEM_MC_ADVANCE_RIP_AND_FINISH();
7565 } IEM_MC_ENDIF();
7566 IEM_MC_END();
7567 }
7568 else
7569 {
7570 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7574 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7575 } IEM_MC_ELSE() {
7576 IEM_MC_ADVANCE_RIP_AND_FINISH();
7577 } IEM_MC_ENDIF();
7578 IEM_MC_END();
7579 }
7580}
7581
7582
7583/**
7584 * @opcode 0x85
7585 * @opfltest zf
7586 */
7587FNIEMOP_DEF(iemOp_jne_Jv)
7588{
7589 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7590 IEMOP_HLP_MIN_386();
7591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7593 {
7594 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7595 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7598 IEM_MC_ADVANCE_RIP_AND_FINISH();
7599 } IEM_MC_ELSE() {
7600 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7601 } IEM_MC_ENDIF();
7602 IEM_MC_END();
7603 }
7604 else
7605 {
7606 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7610 IEM_MC_ADVANCE_RIP_AND_FINISH();
7611 } IEM_MC_ELSE() {
7612 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7613 } IEM_MC_ENDIF();
7614 IEM_MC_END();
7615 }
7616}
7617
7618
7619/**
7620 * @opcode 0x86
7621 * @opfltest cf,zf
7622 */
7623FNIEMOP_DEF(iemOp_jbe_Jv)
7624{
7625 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7626 IEMOP_HLP_MIN_386();
7627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7629 {
7630 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7631 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7633 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7634 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7635 } IEM_MC_ELSE() {
7636 IEM_MC_ADVANCE_RIP_AND_FINISH();
7637 } IEM_MC_ENDIF();
7638 IEM_MC_END();
7639 }
7640 else
7641 {
7642 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7646 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7647 } IEM_MC_ELSE() {
7648 IEM_MC_ADVANCE_RIP_AND_FINISH();
7649 } IEM_MC_ENDIF();
7650 IEM_MC_END();
7651 }
7652}
7653
7654
7655/**
7656 * @opcode 0x87
7657 * @opfltest cf,zf
7658 */
7659FNIEMOP_DEF(iemOp_jnbe_Jv)
7660{
7661 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7662 IEMOP_HLP_MIN_386();
7663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7665 {
7666 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7667 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7669 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7670 IEM_MC_ADVANCE_RIP_AND_FINISH();
7671 } IEM_MC_ELSE() {
7672 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7673 } IEM_MC_ENDIF();
7674 IEM_MC_END();
7675 }
7676 else
7677 {
7678 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7681 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7682 IEM_MC_ADVANCE_RIP_AND_FINISH();
7683 } IEM_MC_ELSE() {
7684 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7685 } IEM_MC_ENDIF();
7686 IEM_MC_END();
7687 }
7688}
7689
7690
7691/**
7692 * @opcode 0x88
7693 * @opfltest sf
7694 */
7695FNIEMOP_DEF(iemOp_js_Jv)
7696{
7697 IEMOP_MNEMONIC(js_Jv, "js Jv");
7698 IEMOP_HLP_MIN_386();
7699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7701 {
7702 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7703 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7706 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7707 } IEM_MC_ELSE() {
7708 IEM_MC_ADVANCE_RIP_AND_FINISH();
7709 } IEM_MC_ENDIF();
7710 IEM_MC_END();
7711 }
7712 else
7713 {
7714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7718 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7719 } IEM_MC_ELSE() {
7720 IEM_MC_ADVANCE_RIP_AND_FINISH();
7721 } IEM_MC_ENDIF();
7722 IEM_MC_END();
7723 }
7724}
7725
7726
7727/**
7728 * @opcode 0x89
7729 * @opfltest sf
7730 */
7731FNIEMOP_DEF(iemOp_jns_Jv)
7732{
7733 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7734 IEMOP_HLP_MIN_386();
7735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7737 {
7738 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7739 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7742 IEM_MC_ADVANCE_RIP_AND_FINISH();
7743 } IEM_MC_ELSE() {
7744 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7745 } IEM_MC_ENDIF();
7746 IEM_MC_END();
7747 }
7748 else
7749 {
7750 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7754 IEM_MC_ADVANCE_RIP_AND_FINISH();
7755 } IEM_MC_ELSE() {
7756 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7757 } IEM_MC_ENDIF();
7758 IEM_MC_END();
7759 }
7760}
7761
7762
7763/**
7764 * @opcode 0x8a
7765 * @opfltest pf
7766 */
7767FNIEMOP_DEF(iemOp_jp_Jv)
7768{
7769 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7770 IEMOP_HLP_MIN_386();
7771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7773 {
7774 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7775 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7778 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7779 } IEM_MC_ELSE() {
7780 IEM_MC_ADVANCE_RIP_AND_FINISH();
7781 } IEM_MC_ENDIF();
7782 IEM_MC_END();
7783 }
7784 else
7785 {
7786 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7790 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7791 } IEM_MC_ELSE() {
7792 IEM_MC_ADVANCE_RIP_AND_FINISH();
7793 } IEM_MC_ENDIF();
7794 IEM_MC_END();
7795 }
7796}
7797
7798
7799/**
7800 * @opcode 0x8b
7801 * @opfltest pf
7802 */
7803FNIEMOP_DEF(iemOp_jnp_Jv)
7804{
7805 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7806 IEMOP_HLP_MIN_386();
7807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7809 {
7810 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7811 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7813 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7814 IEM_MC_ADVANCE_RIP_AND_FINISH();
7815 } IEM_MC_ELSE() {
7816 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7817 } IEM_MC_ENDIF();
7818 IEM_MC_END();
7819 }
7820 else
7821 {
7822 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 } IEM_MC_ELSE() {
7828 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7829 } IEM_MC_ENDIF();
7830 IEM_MC_END();
7831 }
7832}
7833
7834
7835/**
7836 * @opcode 0x8c
7837 * @opfltest sf,of
7838 */
7839FNIEMOP_DEF(iemOp_jl_Jv)
7840{
7841 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7842 IEMOP_HLP_MIN_386();
7843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7845 {
7846 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7847 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7849 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7850 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7851 } IEM_MC_ELSE() {
7852 IEM_MC_ADVANCE_RIP_AND_FINISH();
7853 } IEM_MC_ENDIF();
7854 IEM_MC_END();
7855 }
7856 else
7857 {
7858 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7861 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7862 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7863 } IEM_MC_ELSE() {
7864 IEM_MC_ADVANCE_RIP_AND_FINISH();
7865 } IEM_MC_ENDIF();
7866 IEM_MC_END();
7867 }
7868}
7869
7870
7871/**
7872 * @opcode 0x8d
7873 * @opfltest sf,of
7874 */
7875FNIEMOP_DEF(iemOp_jnl_Jv)
7876{
7877 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7878 IEMOP_HLP_MIN_386();
7879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7881 {
7882 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7883 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7885 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7886 IEM_MC_ADVANCE_RIP_AND_FINISH();
7887 } IEM_MC_ELSE() {
7888 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7889 } IEM_MC_ENDIF();
7890 IEM_MC_END();
7891 }
7892 else
7893 {
7894 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7898 IEM_MC_ADVANCE_RIP_AND_FINISH();
7899 } IEM_MC_ELSE() {
7900 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7901 } IEM_MC_ENDIF();
7902 IEM_MC_END();
7903 }
7904}
7905
7906
7907/**
7908 * @opcode 0x8e
7909 * @opfltest zf,sf,of
7910 */
7911FNIEMOP_DEF(iemOp_jle_Jv)
7912{
7913 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7914 IEMOP_HLP_MIN_386();
7915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7917 {
7918 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7919 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7921 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7922 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7923 } IEM_MC_ELSE() {
7924 IEM_MC_ADVANCE_RIP_AND_FINISH();
7925 } IEM_MC_ENDIF();
7926 IEM_MC_END();
7927 }
7928 else
7929 {
7930 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7933 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7934 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7935 } IEM_MC_ELSE() {
7936 IEM_MC_ADVANCE_RIP_AND_FINISH();
7937 } IEM_MC_ENDIF();
7938 IEM_MC_END();
7939 }
7940}
7941
7942
7943/**
7944 * @opcode 0x8f
7945 * @opfltest zf,sf,of
7946 */
7947FNIEMOP_DEF(iemOp_jnle_Jv)
7948{
7949 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7950 IEMOP_HLP_MIN_386();
7951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7953 {
7954 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7955 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7958 IEM_MC_ADVANCE_RIP_AND_FINISH();
7959 } IEM_MC_ELSE() {
7960 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7961 } IEM_MC_ENDIF();
7962 IEM_MC_END();
7963 }
7964 else
7965 {
7966 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7970 IEM_MC_ADVANCE_RIP_AND_FINISH();
7971 } IEM_MC_ELSE() {
7972 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7973 } IEM_MC_ENDIF();
7974 IEM_MC_END();
7975 }
7976}
7977
7978
/**
 * @opcode 0x90
 * @opfltest of
 *
 * SETO Eb - stores 1 in the byte r/m operand when OF is set, 0 otherwise.
 * The byte is always written, in both the register and memory encodings.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8021
8022
/**
 * @opcode 0x91
 * @opfltest of
 *
 * SETNO Eb - stores 0 in the byte r/m operand when OF is set, 1 otherwise
 * (i.e. the inverse of SETO; the byte is always written).
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8065
8066
/**
 * @opcode 0x92
 * @opfltest cf
 *
 * SETC/SETB/SETNAE Eb - stores 1 in the byte r/m operand when CF is set,
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8109
8110
/**
 * @opcode 0x93
 * @opfltest cf
 *
 * SETNC/SETNB/SETAE Eb - stores 0 in the byte r/m operand when CF is set,
 * 1 otherwise (inverse of SETC).
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8153
8154
/**
 * @opcode 0x94
 * @opfltest zf
 *
 * SETE/SETZ Eb - stores 1 in the byte r/m operand when ZF is set,
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8197
8198
/**
 * @opcode 0x95
 * @opfltest zf
 *
 * SETNE/SETNZ Eb - stores 0 in the byte r/m operand when ZF is set,
 * 1 otherwise (inverse of SETE).
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8241
8242
/**
 * @opcode 0x96
 * @opfltest cf,zf
 *
 * SETBE/SETNA Eb - stores 1 in the byte r/m operand when CF or ZF is set,
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8285
8286
/**
 * @opcode 0x97
 * @opfltest cf,zf
 *
 * SETNBE/SETA Eb - stores 1 in the byte r/m operand when neither CF nor ZF
 * is set, 0 otherwise (inverse of SETBE).
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8329
8330
/**
 * @opcode 0x98
 * @opfltest sf
 *
 * SETS Eb - stores 1 in the byte r/m operand when SF is set, 0 otherwise.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8373
8374
/**
 * @opcode 0x99
 * @opfltest sf
 *
 * SETNS Eb - stores 0 in the byte r/m operand when SF is set, 1 otherwise
 * (inverse of SETS).
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8417
8418
/**
 * @opcode 0x9a
 * @opfltest pf
 *
 * SETP/SETPE Eb - stores 1 in the byte r/m operand when PF is set,
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8461
8462
/**
 * @opcode 0x9b
 * @opfltest pf
 *
 * SETNP/SETPO Eb - stores 0 in the byte r/m operand when PF is set,
 * 1 otherwise (inverse of SETP).
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8505
8506
/**
 * @opcode 0x9c
 * @opfltest sf,of
 *
 * SETL/SETNGE Eb - stores 1 in the byte r/m operand when SF differs from OF,
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8549
8550
/**
 * @opcode 0x9d
 * @opfltest sf,of
 *
 * SETNL/SETGE Eb - stores 1 in the byte r/m operand when SF equals OF,
 * 0 otherwise (inverse of SETL).
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8593
8594
/**
 * @opcode 0x9e
 * @opfltest zf,sf,of
 *
 * SETLE/SETNG Eb - stores 1 in the byte r/m operand when ZF is set or SF
 * differs from OF, 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8637
8638
/**
 * @opcode 0x9f
 * @opfltest zf,sf,of
 *
 * SETNLE/SETG Eb - stores 1 in the byte r/m operand when ZF is clear and SF
 * equals OF, 0 otherwise (inverse of SETLE; the code tests the inverse
 * condition and stores 1 from the else branch).
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8681
8682
/** Opcode 0x0f 0xa0 - PUSH FS; decodes nothing further and hands the segment
 *  index to the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8691
8692
/** Opcode 0x0f 0xa1 - POP FS; deferred to the C implementation
 *  (iemCImpl_pop_Sreg).  The register mask lists the guest state the call
 *  may modify: rSP plus the FS selector/base/limit/attribute fields
 *  (presumably consumed by the native recompiler - the kIemNativeGstReg_*
 *  names suggest so). */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
                                iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8708
8709
/** Opcode 0x0f 0xa2 - CPUID; deferred to iemCImpl_cpuid with the
 *  IEM_CIMPL_F_VMEXIT flag set, and with eax/ecx/edx/ebx marked as modified
 *  by the call. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
                                iemCImpl_cpuid);
}
8723
8724
/**
 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Emits decoding and dispatch for the Ev,Gv bit-test forms with a register
 * bit-offset operand:
 *      - Register destination: the offset is masked to the operand width
 *        (0xf/0x1f/0x3f) and the worker is called on the register directly.
 *      - Memory destination: the signed bit offset is turned into a byte
 *        displacement (arithmetic shift right by 4/5/6, then shifted left
 *        by 1/2/3 to scale to the operand size) which is added to the
 *        effective address before the remaining in-word offset is masked;
 *        the word is then mapped read-write and the worker invoked.
 *
 * The macro is deliberately left open after the unlocked memory path; it
 * must be completed by IEMOP_BODY_BIT_Ev_Gv_LOCKED, which supplies the
 * LOCK prefixed (atomic mapping) memory code path.
 *
 * OF, SF, ZF, AF and PF are declared undefined for verification purposes.
 */

#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.  This
   completes IEMOP_BODY_BIT_Ev_Gv_RW for the LOCK prefixed memory case: the
   same bit-offset to byte-displacement effective address adjustment, but the
   destination is mapped with IEM_MC_MEM_MAP_Uxx_ATOMIC and committed with
   IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC, calling the a_fnLockedUxx workers. */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_ARG(uint16_t, u16Src, 1); \
            IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
            IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
            IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
            IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
            IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_ARG(uint32_t, u32Src, 1); \
            IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
            IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
            IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
            IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
            IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_ARG(uint64_t, u64Src, 1); \
            IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
            IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
            IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
            IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
            IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    } \
    } \
    (void)0
8979
/* Read-only version (bt). */
/*
 * Self-contained body for BT Ev,Gv: tests a bit without writing the
 * destination, so the memory operand is mapped read-only and a LOCK prefix
 * raises #UD.  OF/SF/ZF/AF/PF are declared undefined for the verifier.
 *
 * Register destination: the bit offset is simply masked to the operand
 * width (wrap-around within the register).  Memory destination: as in the
 * RW/LOCKED bodies, the signed bit offset also adjusts the effective
 * address (SAR by log2(width), scaled to bytes) before the in-operand bit
 * index (masked offset) is tested.
 */
#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* BT never writes, so a LOCK prefix is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
9143
9144
/**
 * @opcode 0xa3
 * @oppfx n/a
 * @opflclass bitmap
 *
 * BT Ev,Gv - test a bit and set CF; the destination is never written, so the
 * read-only body is used (it also rejects a LOCK prefix with \#UD).
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
9156
9157
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * (NOTE(review): the 'SHR' in the macro name is presumably a typo for
 * 'SHRD'; left as-is since both use sites reference this spelling.)
 *
 * Takes an expression yielding a PCIEMOPSHIFTDBLSIZES implementation table
 * and emits the register and memory paths for the imm8-count double-shift.
 * AF/OF are declared undefined for the verifier.  In the memory path the
 * imm8 is decoded *after* the effective address (the displacement precedes
 * the immediate in the encoding; note the '1' passed to
 * IEM_MC_CALC_RM_EFF_ADDR for the trailing immediate byte).  The 32-bit
 * register case clears the high half of the 64-bit GPR per x86-64 rules.
 */
#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit op zero-extends in 64-bit mode. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9308
9309
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as the imm8 variant, but the shift count is fetched from CL at
 * runtime (IEM_MC_FETCH_GREG_U8 of X86_GREG_xCX) instead of being decoded
 * from the instruction stream.  AF/OF are declared undefined for the
 * verifier, the 32-bit register case zero-extends into the full 64-bit
 * GPR, and the memory destination is mapped read-write.
 */
#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit op zero-extends in 64-bit mode. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9454
9455
/**
 * @opcode 0xa4
 * @opflclass shift_count
 *
 * SHLD Ev,Gv,Ib - double-precision shift left with imm8 count; body shared
 * with SHRD, selecting the EFLAGS-behavior-specific shld worker table.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9466
9467
/**
 * @opcode 0xa5
 * @opflclass shift_count
 *
 * SHLD Ev,Gv,CL - double-precision shift left with the count taken from CL;
 * body shared with SHRD, selecting the shld worker table.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9478
9479
/** Opcode 0x0f 0xa8 - push gs.
 * Forwards to the common segment-register push worker with X86_SREG_GS. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9488
9489
/** Opcode 0x0f 0xa9 - pop gs.
 * Defers to iemCImpl_pop_Sreg; the guest-register mask announces that rSP
 * and all GS hidden-register fields (selector, base, limit, attributes)
 * may be modified by the C implementation. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Stack ops default to 64-bit operand size in long mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9505
9506
/** Opcode 0x0f 0xaa - rsm (resume from system management mode).
 * Defers entirely to iemCImpl_rsm; the flags mark it as a far indirect
 * branch that can change CPU mode, RFLAGS and the stack, forces a VM-exit
 * check and ends the current translation block. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_rsm);
}
9517
9518
9519
/**
 * @opcode 0xab
 * @oppfx n/a
 * @opflclass bitmap
 *
 * BTS Ev,Gv - bit test and set.  The RW body emits the register and plain
 * memory paths and deliberately ends mid-block; the LOCKED body that
 * follows completes it with the atomic (LOCK-prefixed) memory paths and
 * the closing braces (the two macros form one statement sequence).
 */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9532
9533
/**
 * @opcode 0xac
 * @opflclass shift_count
 *
 * SHRD Ev,Gv,Ib - double-precision shift right with imm8 count; body shared
 * with SHLD, selecting the shrd worker table.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9544
9545
/**
 * @opcode 0xad
 * @opflclass shift_count
 *
 * SHRD Ev,Gv,CL - double-precision shift right with the count taken from CL;
 * body shared with SHLD, selecting the shrd worker table.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9556
9557
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Saves the x87/MMX/SSE state to a 512-byte memory area via
 * iemCImpl_fxsave.  Raises \#UD when the guest CPU profile lacks
 * FXSAVE/FXRSTOR support.  Only reads the FPU state, hence the
 * ACTUALIZE_..._FOR_READ. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9575
9576
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Restores the x87/MMX/SSE state from a 512-byte memory area via
 * iemCImpl_fxrstor.  Raises \#UD when the guest CPU profile lacks
 * FXSAVE/FXRSTOR support.  Modifies the FPU state, hence
 * ACTUALIZE_..._FOR_CHANGE and the FCW/FSW bits in the modified-register
 * mask. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9595
9596
/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * LDMXCSR - load MXCSR from a 32-bit memory operand; the actual load,
 * including reserved-bit checking (\#GP, see @optest above), is done by
 * iemCImpl_ldmxcsr.  Raises \#UD when the guest CPU profile lacks SSE.
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9631
9632
/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * STMXCSR - store MXCSR to a 32-bit memory operand via iemCImpl_stmxcsr.
 * Raises \#UD when the guest CPU profile lacks SSE.
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9666
9667
/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 *
 * XSAVE - save processor extended state selected by EDX:EAX to memory;
 * the heavy lifting (XCR0 masking, component layout) is in iemCImpl_xsave.
 * Raises \#UD when the guest CPU profile lacks XSAVE/XRSTOR.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9692
9693
/**
 * @opmaps grp15
 * @opcode !11/5
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 *
 * XRSTOR - restore processor extended state selected by EDX:EAX from
 * memory via iemCImpl_xrstor.  Raises \#UD when the guest CPU profile
 * lacks XSAVE/XRSTOR.
 *
 * NOTE(review): this uses IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ while the
 * otherwise analogous fxrstor uses ..._FOR_CHANGE - confirm this is
 * intentional (the FCW/FSW modified-register mask suggests state is
 * written).
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9719
/** Opcode 0x0f 0xae mem/6 - xsaveopt.
 * Not implemented yet; FNIEMOP_STUB_1 presumably expands to a
 * not-implemented handler body - see its definition. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9722
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * CLFLUSH - flush the cache line containing the memory operand.  Shares
 * the iemCImpl_clflush_clflushopt worker with CLFLUSHOPT.  Decodes as an
 * invalid-with-ModR/M instruction when the guest CPU profile lacks CLFSH.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9745
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * CLFLUSHOPT - optimized (weakly ordered) cache-line flush; same worker as
 * CLFLUSH, distinguished only by the 0x66 prefix and feature bit.  Decodes
 * as an invalid-with-ModR/M instruction when CLFLUSHOPT is unavailable.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9768
9769
/** Opcode 0x0f 0xae 11b/5 - lfence.
 * Load fence.  On ARM64 hosts the real fence worker is always used; on x86
 * hosts an alternative memory-fence worker is substituted when the host
 * itself lacks SSE2 (and thus the LFENCE instruction). */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9788
9789
/** Opcode 0x0f 0xae 11b/6 - mfence.
 * Full memory fence.  Same host-capability fallback scheme as lfence: an
 * alternative fence worker is used on x86 hosts without SSE2. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9808
9809
/** Opcode 0x0f 0xae 11b/7 - sfence.
 * Store fence.  Same host-capability fallback scheme as lfence/mfence: an
 * alternative fence worker is used on x86 hosts without SSE2. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9828
9829
/** Opcode 0xf3 0x0f 0xae 11b/0.
 * Reads the FS segment base into the ModRM r/m register; the effective
 * operand size (REX.W) selects a 64-bit or 32-bit destination read. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: copy the full FS base. */
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT(); /* #UD unless FSGSBASE is supported and enabled (CR4.FSGSBASE). */
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9857
9858
/** Opcode 0xf3 0x0f 0xae 11b/1.
 * Reads the GS segment base into the ModRM r/m register; the effective
 * operand size (REX.W) selects a 64-bit or 32-bit destination read. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: copy the full GS base. */
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT(); /* #UD unless FSGSBASE is supported and enabled (CR4.FSGSBASE). */
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9886
9887
/** Opcode 0xf3 0x0f 0xae 11b/2.
 * Writes the FS segment base from the ModRM r/m register; the effective
 * operand size (REX.W) selects a 64-bit or 32-bit source. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT(); /* #UD unless FSGSBASE is supported and enabled (CR4.FSGSBASE). */
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base values. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: a 32-bit value is always canonical, so no
           canonicality check is needed; stored into the 64-bit base field. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9916
9917
/** Opcode 0xf3 0x0f 0xae 11b/3.
 * Writes the GS segment base from the ModRM r/m register; the effective
 * operand size (REX.W) selects a 64-bit or 32-bit source. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT(); /* #UD unless FSGSBASE is supported and enabled (CR4.FSGSBASE). */
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base values. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: a 32-bit value is always canonical, so no
           canonicality check is needed; stored into the 64-bit base field. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9946
9947
/**
 * Group 15 jump table for register variant.
 *
 * Indexed by (ModRM.reg * 4 + idxPrefix), where idxPrefix encodes the
 * mandatory prefix: 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h (matching the
 * column comment below).  Used by iemOp_Grp15 when ModRM selects register
 * mode (mod == 11b).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4); /* 8 /reg values times 4 prefix columns. */
9963
9964
/**
 * Group 15 jump table for memory variant.
 *
 * Indexed by (ModRM.reg * 4 + idxPrefix), where idxPrefix encodes the
 * mandatory prefix: 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h (matching the
 * column comment below).  Used by iemOp_Grp15 when ModRM selects memory
 * mode (mod != 11b).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4); /* 8 /reg values times 4 prefix columns. */
9980
9981
/** Opcode 0x0f 0xae.
 * Group 15 dispatcher: picks the handler from the register or memory jump
 * table based on the ModRM mod field, the reg field and the mandatory
 * prefix index (idxPrefix: 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h). */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
9995
9996
/**
 * @opcode      0xaf
 * @opflclass   multiply
 *
 * Two-operand IMUL: Gv = Gv * Ev.  SF/ZF/AF/PF are left undefined by the
 * hardware, hence the IEMOP_VERIFICATION_UNDEFINED_EFLAGS mask below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Select the worker matching the target CPU's EFLAGS behavior (the
       undefined-flag results differ between CPU vendors/generations). */
    const IEMOPBINTODOSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_TODO_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
}
10010
10011
/**
 * @opcode      0xb0
 * @opflclass   arithmetic
 *
 * CMPXCHG Eb,Gb: compare AL with the destination byte; if equal, store the
 * source register into the destination (ZF=1), otherwise load the
 * destination into AL (ZF=0).  The comparison/exchange details live in the
 * iemAImpl_cmpxchg_u8* assembly workers.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on the guest register
           copies via references; LOCK is invalid for register operands. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,  pu8Dst, 0);
        IEM_MC_ARG(uint8_t *,  pu8Al,  1);
        IEM_MC_ARG(uint8_t,    u8Src,  2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
/**
 * Memory-destination body, shared between the plain and the LOCKed variant
 * via @a a_fnWorker / @a a_Type (RW mapping vs ATOMIC mapping).  AL is kept
 * in a local and written back after the worker returns (the worker updates
 * it through pu8Al as needed).
 */
#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            \
            IEM_MC_LOCAL(uint8_t, u8Al); \
            IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
            IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        /* Use the atomic worker when a LOCK prefix is present (unless the
           execution mode says to disregard LOCK). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
        }
    }
}
10078
/**
 * @opcode      0xb1
 * @opflclass   arithmetic
 *
 * CMPXCHG Ev,Gv: compare {r,e}AX with the destination; if equal, store the
 * source register into the destination (ZF=1), otherwise load the
 * destination into {r,e}AX (ZF=0).  The comparison/exchange details live in
 * the iemAImpl_cmpxchg_u16/u32/u64* assembly workers.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate via register references; LOCK is
           invalid for register operands. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax,  1);
                IEM_MC_ARG(uint16_t,   u16Src,  2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t,   u32Src,  2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* The worker writes through 32-bit references, so the upper
                   half of whichever 64-bit register was written must be
                   cleared explicitly: dest on success (ZF=1), RAX on failure. */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
                IEM_MC_ARG(uint64_t,   u64Src,  2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
/**
 * Memory-destination body, shared between the plain and LOCKed variants via
 * the worker parameters and @a a_Type (RW mapping vs ATOMIC mapping).  The
 * accumulator is kept in a local, the worker updates it through the arg
 * reference, and it is written back after the memory commit.
 */
#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                        \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        \
                        IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        \
                        IEM_MC_ARG(uint16_t, u16Src, 2); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        \
                        IEM_MC_LOCAL(uint16_t, u16Ax); \
                        IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
                        \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                        IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
                        \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                        IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        \
                        IEM_MC_ARG(uint32_t, u32Src, 2); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        \
                        IEM_MC_LOCAL(uint32_t, u32Eax); \
                        IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
                        \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                        IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
                        \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                        \
                        /* Only write EAX back on failure (ZF=0); the 32-bit \
                           store also zeroes the upper half of RAX. */ \
                        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
                        } IEM_MC_ENDIF(); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        \
                        IEM_MC_ARG(uint64_t, u64Src, 2); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        \
                        IEM_MC_LOCAL(uint64_t, u64Rax); \
                        IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
                        \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                        \
                        IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
                        \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                        IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)

        /* Use the atomic workers when a LOCK prefix is present (unless the
           execution mode says to disregard LOCK). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
        }
    }
}
10263
10264
/** Opcode 0x0f 0xb2.
 * LSS Gv,Mp: load far pointer from memory into SS:Gv.  Only the memory form
 * is defined; the register form raises \#UD. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
10275
10276
/**
 * @opcode      0xb3
 * @oppfx       n/a
 * @opflclass   bitmap
 *
 * BTR Ev,Gv: test bit and reset.  The two body macros below form one
 * statement sequence: _RW emits the register path and the non-LOCKed memory
 * path and deliberately leaves an else-branch open, which _LOCKED then
 * fills in with the atomic memory path and closes.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_btr_u16,        iemAImpl_btr_u32,        iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10289
10290
/** Opcode 0x0f 0xb4.
 * LFS Gv,Mp: load far pointer from memory into FS:Gv.  Only the memory form
 * is defined; the register form raises \#UD. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
10301
10302
/** Opcode 0x0f 0xb5.
 * LGS Gv,Mp: load far pointer from memory into GS:Gv.  Only the memory form
 * is defined; the register form raises \#UD. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
10313
10314
/** Opcode 0x0f 0xb6.
 * MOVZX Gv,Eb: zero-extend a byte from register or memory into a 16/32/64
 * bit destination register, selected by the effective operand size. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10410
10411
/** Opcode 0x0f 0xb7.
 * MOVZX Gv,Ew: zero-extend a word from register or memory into a 32 or 64
 * bit destination register (16-bit operand size is folded into the 32-bit
 * path here). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */

    /** @todo There should be no difference in the behaviour whether REX.W is
     *        present or not... */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: 32-bit destination store (which also
               clears the upper half of a 64-bit register). */
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10484
10485
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF).
 * Not implemented; decodes as \#UD via the stub. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10488
10489
/**
 * @opcode      0xb8
 * @oppfx       0xf3
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   cf,pf,af,sf,of
 *
 * POPCNT Gv,Ev: population count.  Requires the guest to report POPCNT in
 * CPUID, otherwise decodes as invalid.  Uses the host assembly worker when
 * available, else the portable C fallback.
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINTODOSIZES s_Native =
    {   NULL, NULL,     iemAImpl_popcnt_u16,    NULL,   iemAImpl_popcnt_u32,    NULL,   iemAImpl_popcnt_u64,    NULL };
# endif
    static const IEMOPBINTODOSIZES s_Fallback =
    {   NULL, NULL,     iemAImpl_popcnt_u16_fallback,   NULL,   iemAImpl_popcnt_u32_fallback,   NULL,   iemAImpl_popcnt_u64_fallback, NULL };
#endif
    /* Pick native vs fallback workers based on the host's POPCNT support. */
    const IEMOPBINTODOSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_TODO_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
}
10513
10514
/**
 * @opcode      0xb9
 * @opinvalid   intel-modrm
 * @optest      ->
 *
 * UD1: architecturally reserved as a guaranteed invalid opcode.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32.  So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
10530
10531
/**
 * Body for group 8 bit instruction.
 *
 * Read-write variant (bts/btr/btc with an imm8 bit index).  Covers the
 * register destination path and the non-LOCKed memory path, then ends with
 * a deliberately unterminated else-branch: IEMOP_BODY_BIT_Ev_Ib_LOCKED must
 * follow immediately in the caller to supply the atomic memory path and
 * close the braces.
 *
 * Note! The immediate bit index is masked to the operand width
 *       (0x0f / 0x1f / 0x3f), so register operands never index outside the
 *       register; negative/large offsets only matter for the memory forms.
 */
#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ bImm & 0x0f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ bImm & 0x1f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes through a reference do not clear the high half; do it here. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ bImm & 0x3f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* The '1' tells the effective address calc there is one more opcode byte (the imm8). */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/* Supplies the LOCKed (atomic mapping) memory paths for the group 8 imm8
   bit instructions and closes the braces left open by
   IEMOP_BODY_BIT_Ev_Ib_RW above; must directly follow that macro. */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* The '1' tells the effective address calc there is one more opcode byte (the imm8). */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
10749
10750/* Read-only version (bt) */
/**
 * Emitter body for the read-only BT Ev,Ib form (grp 8 /4).
 *
 * Unlike the _RW/_LOCKED pair this macro is self contained: BT never writes
 * the destination, so the memory operand is mapped read-only and a LOCK
 * prefix raises \#UD (see the final else branch).
 *
 * OF, SF, ZF, AF and PF are declared undefined for the verifier; only CF is
 * architecturally defined by BT.
 *
 * @param a_fnNormalU16 Assembly worker for the 16-bit test.
 * @param a_fnNormalU32 Assembly worker for the 32-bit test.
 * @param a_fnNormalU64 Assembly worker for the 64-bit test.
 */
#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                /* Register operands: the imm8 bit offset wraps at the operand width. */ \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* The trailing 1 = size of the imm8 still to be fetched below. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* BT has no write-back, so a LOCK prefix is invalid (#UD). */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
10893
10894
10895/**
10896 * @opmaps grp8
10897 * @opcode /4
10898 * @oppfx n/a
10899 * @opflclass bitmap
10900 */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    /* BT only reads the destination, so the read-only body is used; it maps
       memory RO and rejects a LOCK prefix (no locked variant exists). */
    IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
10906
10907
10908/**
10909 * @opmaps grp8
10910 * @opcode /5
10911 * @oppfx n/a
10912 * @opflclass bitmap
10913 */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    /* The two macros below expand to a single if/else: _RW emits the register
       and plain-memory paths and leaves an 'else {' open which _LOCKED then
       completes with the atomic memory paths.  Always use them as a pair. */
    IEMOP_BODY_BIT_Ev_Ib_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
10920
10921
10922/**
10923 * @opmaps grp8
10924 * @opcode /6
10925 * @oppfx n/a
10926 * @opflclass bitmap
10927 */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    /* _RW + _LOCKED form one if/else construct and must be used as a pair. */
    IEMOP_BODY_BIT_Ev_Ib_RW(    iemAImpl_btr_u16,        iemAImpl_btr_u32,        iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10934
10935
10936/**
10937 * @opmaps grp8
10938 * @opcode /7
10939 * @oppfx n/a
10940 * @opflclass bitmap
10941 */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    /* _RW + _LOCKED form one if/else construct and must be used as a pair. */
    IEMOP_BODY_BIT_Ev_Ib_RW(    iemAImpl_btc_u16,        iemAImpl_btc_u32,        iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
10948
10949
10950/** Opcode 0x0f 0xba. */
/* Group 8 decoder: dispatches on the ModR/M reg field to the BT family. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib,  bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10969
10970
10971/**
10972 * @opcode 0xbb
10973 * @oppfx n/a
10974 * @opflclass bitmap
10975 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Gv-variant of the _RW/_LOCKED macro pair; together they expand to one
       if/else construct covering plain and LOCK-prefixed forms. */
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_btc_u16,        iemAImpl_btc_u32,        iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
10983
10984
10985/**
10986 * Body for BSF and BSR instructions.
10987 *
10988 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10989 * the destination register, which means that for 32-bit operations the high
10990 * bits must be left alone.
10991 *
10992 * @param pImpl Pointer to the instruction implementation (assembly).
10993 */
#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* The destination is only written when a bit was found (ZF clear), \
                   so the high dword must only be zapped in that case. */ \
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* Same as the register path: clear the high dword only if written. */ \
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
11124
11125
11126/**
11127 * @opcode 0xbc
11128 * @oppfx !0xf3
11129 * @opfltest cf,pf,af,sf,of
11130 * @opflmodify cf,pf,af,zf,sf,of
11131 * @opflundef cf,pf,af,sf,of
11132 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11133 * document them as inputs. Sigh.
11134 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Flag behavior differs between Intel and AMD, so pick the worker table
       matching the configured target CPU vendor. */
    PCIEMOPBINTODOSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
    IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
}
11143
11144
11145/**
11146 * @opcode 0xbc
11147 * @oppfx 0xf3
11148 * @opfltest pf,af,sf,of
11149 * @opflmodify cf,pf,af,zf,sf,of
11150 * @opflundef pf,af,sf,of
11151 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11152 * document them as inputs. Sigh.
11153 */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    /* Without guest BMI1 the F3 prefix is ignored and this decodes as BSF. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Worker tables: unsuffixed = host instruction, _amd/_intel = emulated
       vendor-specific flag behavior. */
    static const IEMOPBINTODOSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL,     iemAImpl_tzcnt_u16, NULL,       iemAImpl_tzcnt_u32, NULL,       iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINTODOSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_amd, NULL,   iemAImpl_tzcnt_u32_amd, NULL,   iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINTODOSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* Row 0: host lacks native TZCNT; row 1: host has it (BMI1), so the
       unsuffixed native workers may be used where behavior matches. */
    static const IEMOPBINTODOSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt,       &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINTODOSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                                IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_TODO_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
}
11179
11180
11181/**
11182 * @opcode 0xbd
11183 * @oppfx !0xf3
11184 * @opfltest cf,pf,af,sf,of
11185 * @opflmodify cf,pf,af,zf,sf,of
11186 * @opflundef cf,pf,af,sf,of
11187 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11188 * document them as inputs. Sigh.
11189 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Flag behavior differs between Intel and AMD, so pick the worker table
       matching the configured target CPU vendor. */
    PCIEMOPBINTODOSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
    IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
}
11198
11199
11200/**
11201 * @opcode 0xbd
11202 * @oppfx 0xf3
11203 * @opfltest pf,af,sf,of
11204 * @opflmodify cf,pf,af,zf,sf,of
11205 * @opflundef pf,af,sf,of
11206 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11207 * document them as inputs. Sigh.
11208 */
11209FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11210{
11211 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11212 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11213 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11214
11215#ifndef TST_IEM_CHECK_MC
11216 static const IEMOPBINTODOSIZES s_iemAImpl_lzcnt =
11217 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11218 static const IEMOPBINTODOSIZES s_iemAImpl_lzcnt_amd =
11219 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11220 static const IEMOPBINTODOSIZES s_iemAImpl_lzcnt_intel =
11221 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11222 static const IEMOPBINTODOSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11223 {
11224 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11225 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11226 };
11227#endif
11228 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11229 const IEMOPBINTODOSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11230 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11232 IEMOP_BODY_BINARY_TODO_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11233}
11234
11235
11236
11237/** Opcode 0x0f 0xbe. */
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                /* Fetch the byte register sign-extended to the operand size. */
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Memory byte is sign-extended to the operand size on fetch. */
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11332
11333
11334/** Opcode 0x0f 0xbf. */
11335FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11336{
11337 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11338 IEMOP_HLP_MIN_386();
11339
11340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11341
11342 /** @todo Not entirely sure how the operand size prefix is handled here,
11343 * assuming that it will be ignored. Would be nice to have a few
11344 * test for this. */
11345 /*
11346 * If rm is denoting a register, no more instruction bytes.
11347 */
11348 if (IEM_IS_MODRM_REG_MODE(bRm))
11349 {
11350 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11351 {
11352 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11354 IEM_MC_LOCAL(uint32_t, u32Value);
11355 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11356 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11357 IEM_MC_ADVANCE_RIP_AND_FINISH();
11358 IEM_MC_END();
11359 }
11360 else
11361 {
11362 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11364 IEM_MC_LOCAL(uint64_t, u64Value);
11365 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11366 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11367 IEM_MC_ADVANCE_RIP_AND_FINISH();
11368 IEM_MC_END();
11369 }
11370 }
11371 else
11372 {
11373 /*
11374 * We're loading a register from memory.
11375 */
11376 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11377 {
11378 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11379 IEM_MC_LOCAL(uint32_t, u32Value);
11380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11383 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11384 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11385 IEM_MC_ADVANCE_RIP_AND_FINISH();
11386 IEM_MC_END();
11387 }
11388 else
11389 {
11390 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11391 IEM_MC_LOCAL(uint64_t, u64Value);
11392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11395 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11396 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11397 IEM_MC_ADVANCE_RIP_AND_FINISH();
11398 IEM_MC_END();
11399 }
11400 }
11401}
11402
11403
11404/**
11405 * @opcode 0xc0
11406 * @opflclass arithmetic
11407 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* The worker exchanges and adds in place: dst += reg, reg = old dst. */
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** Emits the memory form: a_fnWorker = assembly helper, a_Type = RW (plain)
 * or ATOMIC (locked) memory mapping/commit flavor. */
#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            /* The register operand is worked on via a local copy and written \
               back only after the memory commit succeeded. */ \
            IEM_MC_LOCAL(uint8_t, u8RegCopy); \
            IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
            \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()
        /* Plain access unless a LOCK prefix is present and honored. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
        }
        else
        {
            IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
        }
    }
}
11470
11471
11472/**
11473 * @opcode 0xc1
11474 * @opflclass arithmetic
11475 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                /* The worker exchanges and adds in place: dst += reg, reg = old dst. */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both operands are 32-bit register writes, so both high dwords
                   must be cleared. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** Emits the memory forms for all three operand sizes: a_fnWorkerNN =
 * assembly helpers, a_Type = RW (plain) or ATOMIC (locked) mapping/commit
 * flavor.  The register operand is updated from a local copy only after the
 * memory commit succeeded. */
#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
        do { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_LOCAL(uint16_t, u16RegCopy); \
                    IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_LOCAL(uint32_t, u32RegCopy); \
                    IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_LOCAL(uint64_t, u64RegCopy); \
                    IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } while (0)

        /* Plain access unless a LOCK prefix is present and honored. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
        }
        else
        {
            IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
        }
    }
}
11637
11638
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    /* SSE packed single-precision compare: the imm8 selects the compare
       predicate and the per-element result mask is written back into the
       XMM register named by ModRM.reg.  The assembly worker may signal
       SIMD FP exceptions, hence the MAYBE_RAISE after the call. */
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* imm8 immediately follows ModRM in the register form */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The addressing bytes precede the trailing imm8 in the instruction
           stream, so the effective address must be decoded first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11696
11697
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    /* SSE2 packed double-precision compare; same structure as cmpps above,
       but gated on fSse2 and dispatching to iemAImpl_cmppd_u128. */
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* imm8 immediately follows ModRM in the register form */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address bytes precede the trailing imm8, so decode the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11755
11756
11757/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11758FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11759{
11760 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11761
11762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11763 if (IEM_IS_MODRM_REG_MODE(bRm))
11764 {
11765 /*
11766 * XMM32, XMM32.
11767 */
11768 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11769 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11771 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11772 IEM_MC_LOCAL(X86XMMREG, Dst);
11773 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11774 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11775 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11776 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11777 IEM_MC_PREPARE_SSE_USAGE();
11778 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11779 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11780 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11781 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11782
11783 IEM_MC_ADVANCE_RIP_AND_FINISH();
11784 IEM_MC_END();
11785 }
11786 else
11787 {
11788 /*
11789 * XMM32, [mem32].
11790 */
11791 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11792 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11793 IEM_MC_LOCAL(X86XMMREG, Dst);
11794 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11795 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11797
11798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11799 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11800 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11802 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11803 IEM_MC_PREPARE_SSE_USAGE();
11804
11805 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11806 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11807 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11808 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11809 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11810
11811 IEM_MC_ADVANCE_RIP_AND_FINISH();
11812 IEM_MC_END();
11813 }
11814}
11815
11816
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    /* SSE2 scalar double-precision compare: only the low qword of the
       destination receives the result mask; the high qword is preserved. */
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* imm8 immediately follows ModRM in the register form */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst); /* only the low qword is stored */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address bytes precede the trailing imm8, so decode the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11875
11876
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* Non-temporal store of a 32/64-bit GPR to memory.  Emulated here as a
       plain store (the non-temporal hint has no architectural effect on
       results).  Decode gates on fSse2. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                IEMOP_RAISE_INVALID_OPCODE_RET(); /* 16-bit operand size treated as #UD for now */

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* register destination form is #UD */
}
11927
11928
11929/* Opcode 0x66 0x0f 0xc3 - invalid */
11930/* Opcode 0xf3 0x0f 0xc3 - invalid */
11931/* Opcode 0xf2 0x0f 0xc3 - invalid */
11932
11933
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    /* MMX word insert: writes a 16-bit value (from a GPR or memory) into the
       word of the destination MMX register selected by imm8 & 3.  Available
       with SSE or the AMD MMX extensions. */
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        /* NOTE(review): unlike the register form, the MMX-mode switch happens
           only after the memory fetch - presumably so a faulting access
           leaves the FPU state untouched; confirm against the MC semantics. */
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11982
11983
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    /* SSE2 word insert: writes a 16-bit value (from a GPR or memory) into
       the word of the destination XMM register selected by imm8 & 7. */
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        /* Effective address bytes precede the trailing imm8, so decode the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12028
12029
12030/* Opcode 0xf3 0x0f 0xc4 - invalid */
12031/* Opcode 0xf2 0x0f 0xc4 - invalid */
12032
12033
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /* MMX word extract: zero-extends the word selected by imm8 & 3 from the
       source MMX register into a 32-bit GPR.  Register form only; the
       memory-operand encoding is #UD. */
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue); /* 32-bit store zero-extends the word */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12060
12061
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    /* SSE2 word extract: zero-extends the word selected by imm8 & 7 from the
       source XMM register into a 32-bit GPR.  Register form only; the
       memory-operand encoding is #UD. */
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue); /* 32-bit store zero-extends the word */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12087
12088
12089/* Opcode 0xf3 0x0f 0xc5 - invalid */
12090/* Opcode 0xf2 0x0f 0xc5 - invalid */
12091
12092
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    /* SSE shuffle of packed singles; the imm8 selects source elements.
       No FP exceptions are involved, so the destination is referenced and
       updated in place rather than copied out like the cmpps path. */
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address bytes precede the trailing imm8, so decode the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12143
12144
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    /* SSE2 shuffle of packed doubles; mirrors shufps above but gated on
       fSse2 and dispatching to iemAImpl_shufpd_u128. */
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address bytes precede the trailing imm8, so decode the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12195
12196
12197/* Opcode 0xf3 0x0f 0xc6 - invalid */
12198/* Opcode 0xf2 0x0f 0xc6 - invalid */
12199
12200
12201/**
12202 * @opmaps grp9
12203 * @opcode /1
12204 * @opcodesub !11 mr/reg rex.w=0
12205 * @oppfx n/a
12206 * @opflmodify zf
12207 */
12208FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12209{
12210 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12211#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12212 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12215 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12216 \
12217 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12218 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12219 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12220 \
12221 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12222 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12223 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12224 \
12225 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12226 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12227 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12228 \
12229 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12230 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12231 \
12232 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12233 IEM_MC_COMMIT_EFLAGS(EFlags); \
12234 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12235 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12236 } IEM_MC_ENDIF(); \
12237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12238 \
12239 IEM_MC_END()
12240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12241 {
12242 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12243 }
12244 else
12245 {
12246 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12247 }
12248}
12249
12250
12251/**
12252 * @opmaps grp9
12253 * @opcode /1
12254 * @opcodesub !11 mr/reg rex.w=1
12255 * @oppfx n/a
12256 * @opflmodify zf
12257 */
12258FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12259{
12260 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12261 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12262 {
12263 /*
12264 * This is hairy, very hairy macro fun. We're walking a fine line
12265 * here to make the code parsable by IEMAllInstPython.py and fit into
12266 * the patterns IEMAllThrdPython.py requires for the code morphing.
12267 */
12268#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12269 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12272 IEMOP_HLP_DONE_DECODING(); \
12273 \
12274 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12275 bUnmapInfoStmt; \
12276 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12277 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12278 \
12279 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12280 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12281 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12282 \
12283 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12284 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12285 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12286 \
12287 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12288
12289#define BODY_CMPXCHG16B_TAIL(a_Type) \
12290 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12291 IEM_MC_COMMIT_EFLAGS(EFlags); \
12292 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12293 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12294 } IEM_MC_ENDIF(); \
12295 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12296 IEM_MC_END()
12297
12298#ifdef RT_ARCH_AMD64
12299 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12300 {
12301 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12302 {
12303 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12304 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12305 BODY_CMPXCHG16B_TAIL(RW);
12306 }
12307 else
12308 {
12309 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12311 BODY_CMPXCHG16B_TAIL(ATOMIC);
12312 }
12313 }
12314 else
12315 { /* (see comments in #else case below) */
12316 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12317 {
12318 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12319 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12320 BODY_CMPXCHG16B_TAIL(RW);
12321 }
12322 else
12323 {
12324 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12325 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12328 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12329 pEFlags, bUnmapInfo);
12330 IEM_MC_END();
12331 }
12332 }
12333
12334#elif defined(RT_ARCH_ARM64)
12335 /** @todo may require fallback for unaligned accesses... */
12336 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12337 {
12338 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12339 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12340 BODY_CMPXCHG16B_TAIL(RW);
12341 }
12342 else
12343 {
12344 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12345 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12346 BODY_CMPXCHG16B_TAIL(ATOMIC);
12347 }
12348
12349#else
12350 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12351 accesses and not all all atomic, which works fine on in UNI CPU guest
12352 configuration (ignoring DMA). If guest SMP is active we have no choice
12353 but to use a rendezvous callback here. Sigh. */
12354 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12355 {
12356 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12358 BODY_CMPXCHG16B_TAIL(RW);
12359 }
12360 else
12361 {
12362 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12363 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12364 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12365 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12366 iemCImpl_cmpxchg16b_fallback_rendezvous,
12367 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12368 IEM_MC_END();
12369 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12370 }
12371#endif
12372
12373#undef BODY_CMPXCHG16B
12374 }
12375 Log(("cmpxchg16b -> #UD\n"));
12376 IEMOP_RAISE_INVALID_OPCODE_RET();
12377}
12378
12379FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12380{
12381 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12382 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12383 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12384}
12385
12386
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    /* RDRAND: #UD unless the guest CPU profile advertises the feature.
       Register-destination form only; the work is deferred to
       iemCImpl_rdrand, which also updates RFLAGS (hence IEM_CIMPL_F_RFLAGS). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        /* The RT_BIT_64 mask flags the written GPR for the native recompiler
           (presumably marking it dirty) - see kIemNativeGstReg_GprFirst. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdrand, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12408
/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/* VMPTRLD: decodes the memory operand and defers to iemCImpl_vmptrld.
   Only valid inside VMX operation; diagnostics go to kVmxVDiag_Vmptrld. */
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the encoding decodes to #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12427
/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/* VMCLEAR: decodes the memory operand and defers to iemCImpl_vmclear.
   Only valid inside VMX operation; diagnostics go to kVmxVDiag_Vmclear. */
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the encoding decodes to #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12446
/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/* VMXON: decodes the memory operand and defers to iemCImpl_vmxon.  Note
   there is no IEMOP_HLP_IN_VMX_OPERATION here - VMXON is what enters VMX
   operation in the first place. */
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the encoding decodes to #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12464
/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/* VMPTRST: decodes the memory destination and defers to iemCImpl_vmptrst.
   Only valid inside VMX operation; diagnostics go to kVmxVDiag_Vmptrst. */
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the encoding decodes to #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12483
/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    /* RDSEED: #UD unless the guest CPU profile advertises the feature.
       Structurally identical to rdrand above, dispatching to
       iemCImpl_rdseed instead. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        /* The RT_BIT_64 mask flags the written GPR for the native recompiler
           (presumably marking it dirty) - see kIemNativeGstReg_GprFirst. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdseed, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12506
12507/**
12508 * Group 9 jump table for register variant.
12509 */
12510IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12511{ /* pfx: none, 066h, 0f3h, 0f2h */
12512 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12513 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12514 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12515 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12516 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12517 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12518 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12519 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12520};
12521AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12522
12523
12524/**
12525 * Group 9 jump table for memory variant.
12526 */
12527IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12528{ /* pfx: none, 066h, 0f3h, 0f2h */
12529 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12530 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12531 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12532 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12533 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12534 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12535 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12536 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12537};
12538AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12539
12540
/** Opcode 0x0f 0xc7. */
/* Group 9 dispatcher: reads the mod R/M byte and jumps through one of the two
   tables above, selecting the row by /reg and the column by the active
   operand-size/repeat prefix (idxPrefix: none, 066h, 0f3h, 0f2h). */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                            + pVCpu->iem.s.idxPrefix], bRm);
}
12553
12554
12555/**
12556 * Common 'bswap register' helper.
12557 */
12558FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12559{
12560 switch (pVCpu->iem.s.enmEffOpSize)
12561 {
12562 case IEMMODE_16BIT:
12563 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12565 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12566 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12567 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12568 IEM_MC_ADVANCE_RIP_AND_FINISH();
12569 IEM_MC_END();
12570 break;
12571
12572 case IEMMODE_32BIT:
12573 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12575 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12576 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12577 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12578 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12579 IEM_MC_ADVANCE_RIP_AND_FINISH();
12580 IEM_MC_END();
12581 break;
12582
12583 case IEMMODE_64BIT:
12584 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12586 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12587 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12588 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12589 IEM_MC_ADVANCE_RIP_AND_FINISH();
12590 IEM_MC_END();
12591 break;
12592
12593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12594 }
12595}
12596
12597
/** Opcode 0x0f 0xc8. */
/* BSWAP rAX/r8: register is selected by the low opcode bits plus REX.B. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12608
12609
/** Opcode 0x0f 0xc9. */
/* BSWAP rCX/r9 — see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12617
12618
12619/** Opcode 0x0f 0xca. */
12620FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12621{
12622 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12623 IEMOP_HLP_MIN_486();
12624 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12625}
12626
12627
12628/** Opcode 0x0f 0xcb. */
12629FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12630{
12631 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12632 IEMOP_HLP_MIN_486();
12633 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12634}
12635
12636
/** Opcode 0x0f 0xcc. */
/* BSWAP rSP/r12 — see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
/* BSWAP rBP/r13 — see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
/* BSWAP rSI/r14 — see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
/* BSWAP rDI/r15 — see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12671
12672
/* Opcode 0x0f 0xd0 - invalid */


/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
/* SSE3 packed double add/subtract; forwarded to the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}


/* Opcode 0xf3 0x0f 0xd0 - invalid */


/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
/* SSE3 packed single add/subtract; forwarded to the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12693
12694
12695
/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
/* MMX logical right shift of words; the worker takes no FXSAVE state
   (see iemOpCommonMmxOpt_FullFull_To_Full). */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}

/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
/* SSE2 form of the same shift, on full 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}

/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
/* MMX logical right shift of doublewords. */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}


/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
/* SSE2 form of psrld on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12731
12732/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12733FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12734{
12735 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12736 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12737}
12738
12739
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
/* SSE2 form of psrlq on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12746
12747
12748/* Opcode 0xf3 0x0f 0xd3 - invalid */
12749/* Opcode 0xf2 0x0f 0xd3 - invalid */
12750
12751
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
/* MMX quadword add.  Routed through the _Sse2 worker variant — presumably
   because PADDQ was introduced with SSE2 and needs that CPUID check even in
   its MMX form; confirm against the worker's feature test. */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}


/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
/* SSE2 form of paddq on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddq_u128);
}


/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
/* MMX packed multiply low word. */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
}

/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
/* SSE2 form of pmullw on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12788
12789/* Opcode 0x0f 0xd6 - invalid */
12790
12791/**
12792 * @opcode 0xd6
12793 * @oppfx 0x66
12794 * @opcpuid sse2
12795 * @opgroup og_sse2_pcksclr_datamove
12796 * @opxcpttype none
12797 * @optest op1=-1 op2=2 -> op1=2
12798 * @optest op1=0 op2=-42 -> op1=-42
12799 */
12800FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12801{
12802 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12804 if (IEM_IS_MODRM_REG_MODE(bRm))
12805 {
12806 /*
12807 * Register, register.
12808 */
12809 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12811 IEM_MC_LOCAL(uint64_t, uSrc);
12812
12813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12814 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12815
12816 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12817 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12818
12819 IEM_MC_ADVANCE_RIP_AND_FINISH();
12820 IEM_MC_END();
12821 }
12822 else
12823 {
12824 /*
12825 * Memory, register.
12826 */
12827 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12828 IEM_MC_LOCAL(uint64_t, uSrc);
12829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12830
12831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12835
12836 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12837 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12838
12839 IEM_MC_ADVANCE_RIP_AND_FINISH();
12840 IEM_MC_END();
12841 }
12842}
12843
12844
12845/**
12846 * @opcode 0xd6
12847 * @opcodesub 11 mr/reg
12848 * @oppfx f3
12849 * @opcpuid sse2
12850 * @opgroup og_sse2_simdint_datamove
12851 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12852 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12853 */
12854FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12855{
12856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12857 if (IEM_IS_MODRM_REG_MODE(bRm))
12858 {
12859 /*
12860 * Register, register.
12861 */
12862 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12863 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12865 IEM_MC_LOCAL(uint64_t, uSrc);
12866
12867 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12869 IEM_MC_FPU_TO_MMX_MODE();
12870
12871 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12872 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12873
12874 IEM_MC_ADVANCE_RIP_AND_FINISH();
12875 IEM_MC_END();
12876 }
12877
12878 /**
12879 * @opdone
12880 * @opmnemonic udf30fd6mem
12881 * @opcode 0xd6
12882 * @opcodesub !11 mr/reg
12883 * @oppfx f3
12884 * @opunused intel-modrm
12885 * @opcpuid sse
12886 * @optest ->
12887 */
12888 else
12889 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12890}
12891
12892
12893/**
12894 * @opcode 0xd6
12895 * @opcodesub 11 mr/reg
12896 * @oppfx f2
12897 * @opcpuid sse2
12898 * @opgroup og_sse2_simdint_datamove
12899 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12900 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12901 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12902 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12903 * @optest op1=-42 op2=0xfedcba9876543210
12904 * -> op1=0xfedcba9876543210 ftw=0xff
12905 */
12906FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12907{
12908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12909 if (IEM_IS_MODRM_REG_MODE(bRm))
12910 {
12911 /*
12912 * Register, register.
12913 */
12914 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12915 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12917 IEM_MC_LOCAL(uint64_t, uSrc);
12918
12919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12920 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12921 IEM_MC_FPU_TO_MMX_MODE();
12922
12923 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12924 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12925
12926 IEM_MC_ADVANCE_RIP_AND_FINISH();
12927 IEM_MC_END();
12928 }
12929
12930 /**
12931 * @opdone
12932 * @opmnemonic udf20fd6mem
12933 * @opcode 0xd6
12934 * @opcodesub !11 mr/reg
12935 * @oppfx f2
12936 * @opunused intel-modrm
12937 * @opcpuid sse
12938 * @optest ->
12939 */
12940 else
12941 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12942}
12943
12944
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
/* Extract the byte sign-mask of an MMX register into a GPR.  Register form
   only; available with SSE or AMD's MMX extensions. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *,              puDst, 0);
        IEM_MC_ARG(uint64_t const *,        puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12972
12973
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
/* SSE2 form: extract the byte sign-mask of an XMM register into a GPR.
   Register form only. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *,              puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12998
12999
13000/* Opcode 0xf3 0x0f 0xd7 - invalid */
13001/* Opcode 0xf2 0x0f 0xd7 - invalid */
13002
13003
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
/* MMX subtract unsigned bytes with saturation. */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
/* MMX subtract unsigned words with saturation. */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
/* MMX unsigned byte minimum; uses the MmxSse worker — presumably because the
   MMX form was introduced with SSE/AMD extensions (confirm in the worker). */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */
13059
/** Opcode 0x0f 0xdb - pand Pq, Qq */
/* MMX bitwise AND. */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
/* MMX add unsigned bytes with saturation. */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
/* MMX add unsigned words with saturation. */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
/* MMX unsigned byte maximum; MmxSse worker (same rationale as pminub). */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
/* MMX bitwise AND NOT (dst = ~dst & src). */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */
13154
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
/* MMX byte average (rounding); MmxSse worker (same rationale as pminub). */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
/* MMX arithmetic right shift of words. */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
/* MMX arithmetic right shift of doublewords. */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
/* MMX word average (rounding); MmxSse worker (same rationale as pminub). */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
/* MMX multiply high unsigned words; MmxSse worker (same rationale as pminub). */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
/* MMX multiply high signed words. */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
/* SSE2 form on 128-bit XMM operands. */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */
13269
13270
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
/* Convert packed doubles to dwords with truncation; common SSE2 FP worker. */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
/* Convert packed dwords to doubles; common SSE2 FP worker. */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
/* Convert packed doubles to dwords (current rounding mode); SSE2 FP worker. */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13293
13294
13295/**
13296 * @opcode 0xe7
13297 * @opcodesub !11 mr/reg
13298 * @oppfx none
13299 * @opcpuid sse
13300 * @opgroup og_sse1_cachect
13301 * @opxcpttype none
13302 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13303 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13304 */
/* MOVNTQ m64,mm: non-temporal store of an MMX register.  Memory form only;
   the register-form encoding raises #UD.  (The non-temporal hint itself is
   not modelled here — the store is a plain 64-bit write.) */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,              uSrc);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13341
13342/**
13343 * @opcode 0xe7
13344 * @opcodesub !11 mr/reg
13345 * @oppfx 0x66
13346 * @opcpuid sse2
13347 * @opgroup og_sse2_cachect
13348 * @opxcpttype 1
13349 * @optest op1=-1 op2=2 -> op1=2
13350 * @optest op1=0 op2=-42 -> op1=-42
13351 */
13352FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13353{
13354 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13356 if (IEM_IS_MODRM_MEM_MODE(bRm))
13357 {
13358 /* Register, memory. */
13359 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13360 IEM_MC_LOCAL(RTUINT128U, uSrc);
13361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13362
13363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13367
13368 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13369 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13370
13371 IEM_MC_ADVANCE_RIP_AND_FINISH();
13372 IEM_MC_END();
13373 }
13374
13375 /**
13376 * @opdone
13377 * @opmnemonic ud660fe7reg
13378 * @opcode 0xe7
13379 * @opcodesub 11 mr/reg
13380 * @oppfx 0x66
13381 * @opunused immediate
13382 * @opcpuid sse
13383 * @optest ->
13384 */
13385 else
13386 IEMOP_RAISE_INVALID_OPCODE_RET();
13387}
13388
13389/* Opcode 0xf3 0x0f 0xe7 - invalid */
13390/* Opcode 0xf2 0x0f 0xe7 - invalid */
13391
13392
13393/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13394FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13395{
13396 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13397 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13398}
13399
13400
13401/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13402FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13403{
13404 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13405 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13406}
13407
13408
13409/* Opcode 0xf3 0x0f 0xe8 - invalid */
13410/* Opcode 0xf2 0x0f 0xe8 - invalid */
13411
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    /* Packed subtract of signed words with saturation, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}
13418
13419
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    /* Packed subtract of signed words with saturation, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13426
13427
13428/* Opcode 0xf3 0x0f 0xe9 - invalid */
13429/* Opcode 0xf2 0x0f 0xe9 - invalid */
13430
13431
/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    /* Packed signed word minimum, 64-bit MMX form.  Uses the combined MMX/SSE
       worker since this MMX-register form belongs to the SSE/MMX extensions. */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}
13438
13439
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    /* Packed signed word minimum, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}
13446
13447
13448/* Opcode 0xf3 0x0f 0xea - invalid */
13449/* Opcode 0xf2 0x0f 0xea - invalid */
13450
13451
/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    /* Bitwise OR, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}
13458
13459
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    /* Bitwise OR, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
}
13466
13467
13468/* Opcode 0xf3 0x0f 0xeb - invalid */
13469/* Opcode 0xf2 0x0f 0xeb - invalid */
13470
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    /* Packed add of signed bytes with saturation, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}
13477
13478
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    /* Packed add of signed bytes with saturation, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}
13485
13486
13487/* Opcode 0xf3 0x0f 0xec - invalid */
13488/* Opcode 0xf2 0x0f 0xec - invalid */
13489
/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    /* Packed add of signed words with saturation, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}
13496
13497
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    /* Packed add of signed words with saturation, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13504
13505
13506/* Opcode 0xf3 0x0f 0xed - invalid */
13507/* Opcode 0xf2 0x0f 0xed - invalid */
13508
13509
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    /* Packed signed word maximum, 64-bit MMX form.  Uses the combined MMX/SSE
       worker since this MMX-register form belongs to the SSE/MMX extensions. */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}
13516
13517
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    /* Packed signed word maximum, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
13524
13525
13526/* Opcode 0xf3 0x0f 0xee - invalid */
13527/* Opcode 0xf2 0x0f 0xee - invalid */
13528
13529
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* Bitwise XOR, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}
13536
13537
/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    /* Bitwise XOR, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
}
13544
13545
13546/* Opcode 0xf3 0x0f 0xef - invalid */
13547/* Opcode 0xf2 0x0f 0xef - invalid */
13548
13549/* Opcode 0x0f 0xf0 - invalid */
13550/* Opcode 0x66 0x0f 0xf0 - invalid */
13551
13552
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory: unaligned 128-bit load into an XMM register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3); /* requires SSE3; LOCK prefix is invalid */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* _NO_AC: lddqu never raises alignment faults on the source. */
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13585
13586
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    /* Packed shift left logical of words by the count in Qq, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}
13593
13594
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    /* Packed shift left logical of words by the count in Wx, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
13601
13602
13603/* Opcode 0xf2 0x0f 0xf1 - invalid */
13604
/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    /* Packed shift left logical of dwords by the count in Qq, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}
13611
13612
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    /* Packed shift left logical of dwords by the count in Wx, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}
13619
13620
13621/* Opcode 0xf2 0x0f 0xf2 - invalid */
13622
/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    /* Packed shift left logical of the qword by the count in Qq, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}
13629
13630
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    /* Packed shift left logical of qwords by the count in Wx, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
13637
13638/* Opcode 0xf2 0x0f 0xf3 - invalid */
13639
13640/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13641FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13642{
13643 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13644 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
13645}
13646
13647
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    /* Unsigned multiply of even-indexed dwords producing qwords, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
13654
13655
13656/* Opcode 0xf2 0x0f 0xf4 - invalid */
13657
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    /* Multiply packed signed words and add adjacent products into dwords, 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}
13664
13665
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    /* Multiply packed signed words and add adjacent products into dwords, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
13672
13673/* Opcode 0xf2 0x0f 0xf5 - invalid */
13674
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    /* Sum of absolute differences of packed unsigned bytes, 64-bit MMX form.  Uses the
       combined MMX/SSE worker since this form belongs to the SSE/MMX extensions. */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}
13681
13682
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    /* Sum of absolute differences of packed unsigned bytes, 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
13689
13690
13691/* Opcode 0xf2 0x0f 0xf6 - invalid */
13692
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (not implemented; FNIEMOP_STUB placeholder). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (not implemented; FNIEMOP_STUB placeholder). */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13697/* Opcode 0xf2 0x0f 0xf7 - invalid */
13698
13699
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    /* Packed subtract of bytes (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}
13706
13707
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    /* Packed subtract of bytes (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubb_u128);
}
13714
13715
13716/* Opcode 0xf2 0x0f 0xf8 - invalid */
13717
13718
/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    /* Packed subtract of words (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}
13725
13726
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    /* Packed subtract of words (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubw_u128);
}
13733
13734
13735/* Opcode 0xf2 0x0f 0xf9 - invalid */
13736
13737
/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    /* Packed subtract of dwords (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}
13744
13745
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    /* Packed subtract of dwords (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubd_u128);
}
13752
13753
13754/* Opcode 0xf2 0x0f 0xfa - invalid */
13755
13756
/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    /* Packed subtract of the qword (wrap-around), 64-bit MMX-register form.  PSUBQ was
       introduced with SSE2, hence the SSE2-checking MMX worker variant. */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}
13763
13764
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    /* Packed subtract of qwords (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubq_u128);
}
13771
13772
13773/* Opcode 0xf2 0x0f 0xfb - invalid */
13774
13775
/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    /* Packed add of bytes (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}
13782
13783
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    /* Packed add of bytes (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddb_u128);
}
13790
13791
13792/* Opcode 0xf2 0x0f 0xfc - invalid */
13793
13794
/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    /* Packed add of words (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}
13801
13802
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    /* Packed add of words (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddw_u128);
}
13809
13810
13811/* Opcode 0xf2 0x0f 0xfd - invalid */
13812
13813
/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    /* Packed add of dwords (wrap-around), 64-bit MMX form. */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}
13820
13821
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    /* Packed add of dwords (wrap-around), 128-bit form (SSE2-checking worker). */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddd_u128);
}
13828
13829
13830/* Opcode 0xf2 0x0f 0xfe - invalid */
13831
13832
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    /* On Intel the UD0 encoding consumes a ModR/M byte (and, for memory forms,
       the effective-address bytes) before faulting; for other vendors decoding
       stops right after the opcode. */
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    /* UD0 always raises an invalid-opcode exception. */
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
13846
13847
13848
13849/**
13850 * Two byte opcode map, first byte 0x0f.
13851 *
13852 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13853 * check if it needs updating as well when making changes.
13854 */
13855const PFNIEMOP g_apfnTwoByteMap[] =
13856{
13857 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13858 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13859 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13860 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13861 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13862 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13863 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13864 /* 0x06 */ IEMOP_X4(iemOp_clts),
13865 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13866 /* 0x08 */ IEMOP_X4(iemOp_invd),
13867 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13868 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13869 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13870 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13871 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13872 /* 0x0e */ IEMOP_X4(iemOp_femms),
13873 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13874
13875 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13876 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13877 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13878 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13879 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13880 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13881 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13882 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13883 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13884 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13885 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13886 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13887 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13888 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13889 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13890 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13891
13892 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13893 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13894 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13895 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13896 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13897 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13898 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13899 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13900 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13901 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13902 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13903 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13904 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13905 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13906 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13907 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13908
13909 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13910 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13911 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13912 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13913 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13914 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13915 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13916 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13917 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13918 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13919 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13920 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13921 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13922 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13923 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13924 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13925
13926 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13927 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13928 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13929 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13930 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13931 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13932 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13933 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13934 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13935 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13936 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13937 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13938 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13939 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13940 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13941 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13942
13943 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13944 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13945 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13946 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13947 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13948 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13949 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13950 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13951 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13952 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13953 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13954 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13955 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13956 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13957 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13958 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13959
13960 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13961 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13962 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13963 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13964 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13965 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13966 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13967 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13968 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13969 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13970 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13971 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13972 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13973 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13974 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13975 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13976
13977 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13978 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13979 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13980 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13981 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13982 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13983 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13984 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13985
13986 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13987 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13988 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13989 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13990 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13991 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13992 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13993 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13994
13995 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13996 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13997 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13998 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13999 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14000 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14001 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14002 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14003 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14004 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14005 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14006 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14007 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14008 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14009 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14010 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14011
14012 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14013 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14014 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14015 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14016 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14017 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14018 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14019 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14020 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14021 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14022 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14023 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14024 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14025 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14026 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14027 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14028
14029 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14030 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14031 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14032 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14033 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14034 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14035 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14036 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14037 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14038 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14039 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14040 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14041 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14042 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14043 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14044 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14045
14046 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14047 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14048 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14049 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14050 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14051 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14052 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14053 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14054 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14055 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14056 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14057 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14058 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14059 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14060 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14061 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14062
14063 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14064 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14065 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14066 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14067 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14068 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14069 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14070 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14071 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14072 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14073 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14074 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14075 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14076 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14077 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14078 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14079
14080 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14081 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14082 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14083 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14084 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14085 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14086 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14087 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14088 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14089 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14090 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14091 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14092 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14093 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14094 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14096
14097 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14098 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14099 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14104 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14105 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14106 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14107 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14108 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14109 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14110 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14111 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14112 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14113
14114 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14115 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14117 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14118 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14120 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14125 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14126 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14127 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129 /* 0xff */ IEMOP_X4(iemOp_ud0),
14130};
/* Sanity check: 256 opcodes (0x00..0xff), each row holding 4 handler entries
   (one per prefix column, as seen in the table rows above) => 256 * 4 = 1024. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14132
14133/** @} */
14134
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette