VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@104255

Last change on this file since 104255 was 104207, checked in by vboxsync, 12 months ago

VMM/IEM: Refactoring assembly helpers to not pass eflags by reference but instead by value and return the updated value (via eax/w0) - third chunk: BT,BTC,BTR,BTS. bugref:10376
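
The refactoring pattern the commit describes, shown as a minimal C sketch (hypothetical names and simplified types, not the actual VirtualBox helper signatures):

/* Before: the helper reads and updates EFLAGS through a pointer. */
void emuBitOpByRef(uint32_t *pfEFlags, uint64_t *puDst, uint64_t uSrc);   /* hypothetical */

/* After: EFLAGS is passed by value and the updated value comes back in the
   usual return register (eax on x86, w0 on ARM64), avoiding the memory
   round-trip through the pointer. */
uint32_t emuBitOpByVal(uint32_t fEFlags, uint64_t *puDst, uint64_t uSrc); /* hypothetical */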

/* $Id: IEMAllInstTwoByte0f.cpp.h 104207 2024-04-05 20:57:55Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
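
/* For orientation (illustrative note, not part of the original file): judging
   from the IEM_MC_ARG declarations above, a PFNIEMAIMPLMEDIAOPTF2U64 worker
   simply receives the two qword operands.  A hypothetical PAND-style worker
   might look like this: */
#if 0 /* sketch only */
static void ExamplePAndU64(uint64_t *puDst, uint64_t const *puSrc)
{
    *puDst &= *puSrc; /* pure data operation; no FPU/FXSAVE state involved */
}
#endif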


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that reads either 64 bits or the full
 * 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that reads either 64 bits or the full
 * 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}

/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}

/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
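
/* Illustrative note (not part of the original file): the group index used to
   pick a g_apfnGroup6 entry is the ModRM reg field, bits 5:3 of the byte,
   which is what IEM_GET_MODRM_REG_8 extracts.  A hypothetical equivalent: */
#if 0 /* sketch only */
static unsigned ExampleModRmRegField(uint8_t bRm)
{
    return (bRm >> 3) & 7; /* 0=sldt 1=str 2=lldt 3=ltr 4=verr 5=verw 6,7=invalid */
}
#endif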


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif

/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. Currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. Currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}

/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}

/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};

/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1687
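/**
 * Common worker for LAR and LSL (Gv, Ew), selected via @a fIsLar.
 *
 * The selector operand is fetched as 16 bits in every branch; only the width
 * of the destination register varies with the effective operand size.
 */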
1688FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1689{
1690 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1692
1693 if (IEM_IS_MODRM_REG_MODE(bRm))
1694 {
1695 switch (pVCpu->iem.s.enmEffOpSize)
1696 {
1697 case IEMMODE_16BIT:
1698 IEM_MC_BEGIN(0, 0);
1699 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1701 IEM_MC_ARG(uint16_t, u16Sel, 1);
1702 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1703
1704 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1705 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1706 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1707 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1708
1709 IEM_MC_END();
1710 break;
1711
1712 case IEMMODE_32BIT:
1713 case IEMMODE_64BIT:
1714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1715 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1717 IEM_MC_ARG(uint16_t, u16Sel, 1);
1718 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1719
1720 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1721 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1722 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1723 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1724
1725 IEM_MC_END();
1726 break;
1727
1728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1729 }
1730 }
1731 else
1732 {
1733 switch (pVCpu->iem.s.enmEffOpSize)
1734 {
1735 case IEMMODE_16BIT:
1736 IEM_MC_BEGIN(0, 0);
1737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1738 IEM_MC_ARG(uint16_t, u16Sel, 1);
1739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1741
1742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1744
1745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1746 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1747 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1748 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1749
1750 IEM_MC_END();
1751 break;
1752
1753 case IEMMODE_32BIT:
1754 case IEMMODE_64BIT:
1755 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1756 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1757 IEM_MC_ARG(uint16_t, u16Sel, 1);
1758 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1760
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1762 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1763/** @todo testcase: make sure it's a 16-bit read. */
1764
1765 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1766 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1767 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1768 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1769
1770 IEM_MC_END();
1771 break;
1772
1773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1774 }
1775 }
1776}
1777
1778
1779
1780/**
1781 * @opcode 0x02
1782 * @opflmodify zf
1783 */
1784FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1785{
1786 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1787 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1788}
1789
1790
1791/**
1792 * @opcode 0x03
1793 * @opflmodify zf
1794 */
1795FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1796{
1797 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1798 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1799}
1800
1801
1802/** Opcode 0x0f 0x05. */
1803FNIEMOP_DEF(iemOp_syscall)
1804{
1805 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1807 /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
1808 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1809 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1810 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
1811}
1812
1813
1814/** Opcode 0x0f 0x06. */
1815FNIEMOP_DEF(iemOp_clts)
1816{
1817 IEMOP_MNEMONIC(clts, "clts");
1818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1820}
1821
1822
1823/** Opcode 0x0f 0x07. */
1824FNIEMOP_DEF(iemOp_sysret)
1825{
1826 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1828 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1829 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1830 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1831}
1832
1833
1834/** Opcode 0x0f 0x08. */
1835FNIEMOP_DEF(iemOp_invd)
1836{
1837 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1838 IEMOP_HLP_MIN_486();
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1841}
1842
1843
1844/** Opcode 0x0f 0x09. */
1845FNIEMOP_DEF(iemOp_wbinvd)
1846{
1847 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1848 IEMOP_HLP_MIN_486();
1849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1850 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1851}
1852
1853
1854/** Opcode 0x0f 0x0b. */
1855FNIEMOP_DEF(iemOp_ud2)
1856{
1857 IEMOP_MNEMONIC(ud2, "ud2");
1858 IEMOP_RAISE_INVALID_OPCODE_RET();
1859}
1860
1861/** Opcode 0x0f 0x0d. */
1862FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1863{
1864 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1865 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1866 {
1867 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1868 IEMOP_RAISE_INVALID_OPCODE_RET();
1869 }
1870
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 if (IEM_IS_MODRM_REG_MODE(bRm))
1873 {
1874 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1875 IEMOP_RAISE_INVALID_OPCODE_RET();
1876 }
1877
1878 switch (IEM_GET_MODRM_REG_8(bRm))
1879 {
1880 case 2: /* Aliased to /0 for the time being. */
1881 case 4: /* Aliased to /0 for the time being. */
1882 case 5: /* Aliased to /0 for the time being. */
1883 case 6: /* Aliased to /0 for the time being. */
1884 case 7: /* Aliased to /0 for the time being. */
1885 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1886 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1887 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1889 }
1890
1891 IEM_MC_BEGIN(0, 0);
1892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1895 /* Currently a NOP. */
1896 IEM_MC_NOREF(GCPtrEffSrc);
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899}
1900
1901
1902/** Opcode 0x0f 0x0e. */
1903FNIEMOP_DEF(iemOp_femms)
1904{
1905 IEMOP_MNEMONIC(femms, "femms");
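    /* AMD 3DNow!: a faster EMMS; leaves MMX mode and empties the x87 tag
       word, leaving the register contents undefined. */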
1906
1907 IEM_MC_BEGIN(0, 0);
1908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1911 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1912 IEM_MC_FPU_FROM_MMX_MODE();
1913 IEM_MC_ADVANCE_RIP_AND_FINISH();
1914 IEM_MC_END();
1915}
1916
1917
1918/** Opcode 0x0f 0x0f. */
1919FNIEMOP_DEF(iemOp_3Dnow)
1920{
1921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1922 {
1923 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1924 IEMOP_RAISE_INVALID_OPCODE_RET();
1925 }
1926
1927#ifdef IEM_WITH_3DNOW
1928 /* This is pretty sparse, use switch instead of table. */
1929 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1930 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1931#else
1932 IEMOP_BITCH_ABOUT_STUB();
1933 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1934#endif
1935}
1936
1937
1938/**
1939 * @opcode 0x10
1940 * @oppfx none
1941 * @opcpuid sse
1942 * @opgroup og_sse_simdfp_datamove
1943 * @opxcpttype 4UA
1944 * @optest op1=1 op2=2 -> op1=2
1945 * @optest op1=0 op2=-22 -> op1=-22
1946 */
1947FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1948{
1949 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if (IEM_IS_MODRM_REG_MODE(bRm))
1952 {
1953 /*
1954 * XMM128, XMM128.
1955 */
1956 IEM_MC_BEGIN(0, 0);
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1960 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1961 IEM_GET_MODRM_RM(pVCpu, bRm));
1962 IEM_MC_ADVANCE_RIP_AND_FINISH();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 /*
1968 * XMM128, [mem128].
1969 */
1970 IEM_MC_BEGIN(0, 0);
1971 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1973
1974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1978
1979 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1980 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1981
1982 IEM_MC_ADVANCE_RIP_AND_FINISH();
1983 IEM_MC_END();
1984 }
1985
1986}
1987
1988
1989/**
1990 * @opcode 0x10
1991 * @oppfx 0x66
1992 * @opcpuid sse2
1993 * @opgroup og_sse2_pcksclr_datamove
1994 * @opxcpttype 4UA
1995 * @optest op1=1 op2=2 -> op1=2
1996 * @optest op1=0 op2=-42 -> op1=-42
1997 */
1998FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1999{
2000 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2002 if (IEM_IS_MODRM_REG_MODE(bRm))
2003 {
2004 /*
2005 * XMM128, XMM128.
2006 */
2007 IEM_MC_BEGIN(0, 0);
2008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2011 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2012 IEM_GET_MODRM_RM(pVCpu, bRm));
2013 IEM_MC_ADVANCE_RIP_AND_FINISH();
2014 IEM_MC_END();
2015 }
2016 else
2017 {
2018 /*
2019 * XMM128, [mem128].
2020 */
2021 IEM_MC_BEGIN(0, 0);
2022 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2024
2025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2027 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2028 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2029
2030 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2031 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2032
2033 IEM_MC_ADVANCE_RIP_AND_FINISH();
2034 IEM_MC_END();
2035 }
2036}
2037
2038
2039/**
2040 * @opcode 0x10
2041 * @oppfx 0xf3
2042 * @opcpuid sse
2043 * @opgroup og_sse_simdfp_datamove
2044 * @opxcpttype 5
2045 * @optest op1=1 op2=2 -> op1=2
2046 * @optest op1=0 op2=-22 -> op1=-22
2047 */
2048FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2049{
2050 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2052 if (IEM_IS_MODRM_REG_MODE(bRm))
2053 {
2054 /*
2055 * XMM32, XMM32.
2056 */
2057 IEM_MC_BEGIN(0, 0);
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2059 IEM_MC_LOCAL(uint32_t, uSrc);
2060
2061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2063 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2064 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2065
2066 IEM_MC_ADVANCE_RIP_AND_FINISH();
2067 IEM_MC_END();
2068 }
2069 else
2070 {
2071 /*
2072 * XMM128, [mem32].
2073 */
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_LOCAL(uint32_t, uSrc);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2082
2083 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2084 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089}
2090
2091
2092/**
2093 * @opcode 0x10
2094 * @oppfx 0xf2
2095 * @opcpuid sse2
2096 * @opgroup og_sse2_pcksclr_datamove
2097 * @opxcpttype 5
2098 * @optest op1=1 op2=2 -> op1=2
2099 * @optest op1=0 op2=-42 -> op1=-42
2100 */
2101FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2102{
2103 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 if (IEM_IS_MODRM_REG_MODE(bRm))
2106 {
2107 /*
2108 * XMM64, XMM64.
2109 */
2110 IEM_MC_BEGIN(0, 0);
2111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2112 IEM_MC_LOCAL(uint64_t, uSrc);
2113
2114 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2116 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2117 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2118
2119 IEM_MC_ADVANCE_RIP_AND_FINISH();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * XMM128, [mem64].
2126 */
2127 IEM_MC_BEGIN(0, 0);
2128 IEM_MC_LOCAL(uint64_t, uSrc);
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP_AND_FINISH();
2140 IEM_MC_END();
2141 }
2142}
2143
2144
2145/**
2146 * @opcode 0x11
2147 * @oppfx none
2148 * @opcpuid sse
2149 * @opgroup og_sse_simdfp_datamove
2150 * @opxcpttype 4UA
2151 * @optest op1=1 op2=2 -> op1=2
2152 * @optest op1=0 op2=-42 -> op1=-42
2153 */
2154FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2155{
2156 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2158 if (IEM_IS_MODRM_REG_MODE(bRm))
2159 {
2160 /*
2161 * XMM128, XMM128.
2162 */
2163 IEM_MC_BEGIN(0, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2165 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2166 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2167 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2168 IEM_GET_MODRM_REG(pVCpu, bRm));
2169 IEM_MC_ADVANCE_RIP_AND_FINISH();
2170 IEM_MC_END();
2171 }
2172 else
2173 {
2174 /*
2175 * [mem128], XMM128.
2176 */
2177 IEM_MC_BEGIN(0, 0);
2178 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2180
2181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2185
2186 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2187 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2188
2189 IEM_MC_ADVANCE_RIP_AND_FINISH();
2190 IEM_MC_END();
2191 }
2192}
2193
2194
2195/**
2196 * @opcode 0x11
2197 * @oppfx 0x66
2198 * @opcpuid sse2
2199 * @opgroup og_sse2_pcksclr_datamove
2200 * @opxcpttype 4UA
2201 * @optest op1=1 op2=2 -> op1=2
2202 * @optest op1=0 op2=-42 -> op1=-42
2203 */
2204FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2205{
2206 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2208 if (IEM_IS_MODRM_REG_MODE(bRm))
2209 {
2210 /*
2211 * XMM128, XMM128.
2212 */
2213 IEM_MC_BEGIN(0, 0);
2214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2215 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2216 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2217 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2218 IEM_GET_MODRM_REG(pVCpu, bRm));
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 }
2222 else
2223 {
2224 /*
2225 * [mem128], XMM128.
2226 */
2227 IEM_MC_BEGIN(0, 0);
2228 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2230
2231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2233 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2234 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2235
2236 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2237 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2238
2239 IEM_MC_ADVANCE_RIP_AND_FINISH();
2240 IEM_MC_END();
2241 }
2242}
2243
2244
2245/**
2246 * @opcode 0x11
2247 * @oppfx 0xf3
2248 * @opcpuid sse
2249 * @opgroup og_sse_simdfp_datamove
2250 * @opxcpttype 5
2251 * @optest op1=1 op2=2 -> op1=2
2252 * @optest op1=0 op2=-22 -> op1=-22
2253 */
2254FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2255{
2256 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2258 if (IEM_IS_MODRM_REG_MODE(bRm))
2259 {
2260 /*
2261 * XMM32, XMM32.
2262 */
2263 IEM_MC_BEGIN(0, 0);
2264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2265 IEM_MC_LOCAL(uint32_t, uSrc);
2266
2267 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2269 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2270 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2271
2272 IEM_MC_ADVANCE_RIP_AND_FINISH();
2273 IEM_MC_END();
2274 }
2275 else
2276 {
2277 /*
2278 * [mem32], XMM32.
2279 */
2280 IEM_MC_BEGIN(0, 0);
2281 IEM_MC_LOCAL(uint32_t, uSrc);
2282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2283
2284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2288
2289 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2290 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2291
2292 IEM_MC_ADVANCE_RIP_AND_FINISH();
2293 IEM_MC_END();
2294 }
2295}
2296
2297
2298/**
2299 * @opcode 0x11
2300 * @oppfx 0xf2
2301 * @opcpuid sse2
2302 * @opgroup og_sse2_pcksclr_datamove
2303 * @opxcpttype 5
2304 * @optest op1=1 op2=2 -> op1=2
2305 * @optest op1=0 op2=-42 -> op1=-42
2306 */
2307FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2308{
2309 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2311 if (IEM_IS_MODRM_REG_MODE(bRm))
2312 {
2313 /*
2314 * XMM64, XMM64.
2315 */
2316 IEM_MC_BEGIN(0, 0);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2318 IEM_MC_LOCAL(uint64_t, uSrc);
2319
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2323 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2324
2325 IEM_MC_ADVANCE_RIP_AND_FINISH();
2326 IEM_MC_END();
2327 }
2328 else
2329 {
2330 /*
2331 * [mem64], XMM64.
2332 */
2333 IEM_MC_BEGIN(0, 0);
2334 IEM_MC_LOCAL(uint64_t, uSrc);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2341
2342 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2343 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348}
2349
2350
2351FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2352{
2353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2354 if (IEM_IS_MODRM_REG_MODE(bRm))
2355 {
2356 /**
2357 * @opcode 0x12
2358 * @opcodesub 11 mr/reg
2359 * @oppfx none
2360 * @opcpuid sse
2361 * @opgroup og_sse_simdfp_datamove
2362 * @opxcpttype 5
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
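    /* Copies the high quadword of the source register into the low quadword
       of the destination; the destination's high quadword is preserved. */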
2367
2368 IEM_MC_BEGIN(0, 0);
2369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2370 IEM_MC_LOCAL(uint64_t, uSrc);
2371
2372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2374 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2375 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2376
2377 IEM_MC_ADVANCE_RIP_AND_FINISH();
2378 IEM_MC_END();
2379 }
2380 else
2381 {
2382 /**
2383 * @opdone
2384 * @opcode 0x12
2385 * @opcodesub !11 mr/reg
2386 * @oppfx none
2387 * @opcpuid sse
2388 * @opgroup og_sse_simdfp_datamove
2389 * @opxcpttype 5
2390 * @optest op1=1 op2=2 -> op1=2
2391 * @optest op1=0 op2=-42 -> op1=-42
2392 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2393 */
2394 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
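    /* Loads 64 bits from memory into the low quadword of the destination;
       the high quadword is preserved. */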
2395
2396 IEM_MC_BEGIN(0, 0);
2397 IEM_MC_LOCAL(uint64_t, uSrc);
2398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2399
2400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2404
2405 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2406 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2407
2408 IEM_MC_ADVANCE_RIP_AND_FINISH();
2409 IEM_MC_END();
2410 }
2411}
2412
2413
2414/**
2415 * @opcode 0x12
2416 * @opcodesub !11 mr/reg
2417 * @oppfx 0x66
2418 * @opcpuid sse2
2419 * @opgroup og_sse2_pcksclr_datamove
2420 * @opxcpttype 5
2421 * @optest op1=1 op2=2 -> op1=2
2422 * @optest op1=0 op2=-42 -> op1=-42
2423 */
2424FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2425{
2426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2427 if (IEM_IS_MODRM_MEM_MODE(bRm))
2428 {
2429 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2430
2431 IEM_MC_BEGIN(0, 0);
2432 IEM_MC_LOCAL(uint64_t, uSrc);
2433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2434
2435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2439
2440 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2441 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2442
2443 IEM_MC_ADVANCE_RIP_AND_FINISH();
2444 IEM_MC_END();
2445 }
2446
2447 /**
2448 * @opdone
2449 * @opmnemonic ud660f12m3
2450 * @opcode 0x12
2451 * @opcodesub 11 mr/reg
2452 * @oppfx 0x66
2453 * @opunused immediate
2454 * @opcpuid sse
2455 * @optest ->
2456 */
2457 else
2458 IEMOP_RAISE_INVALID_OPCODE_RET();
2459}
2460
2461
2462/**
2463 * @opcode 0x12
2464 * @oppfx 0xf3
2465 * @opcpuid sse3
2466 * @opgroup og_sse3_pcksclr_datamove
2467 * @opxcpttype 4
2468 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2469 * op1=0x00000002000000020000000100000001
2470 */
2471FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2472{
2473 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
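    /* Duplicates the even-indexed dwords: result = { src0, src0, src2, src2 }. */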
2474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2475 if (IEM_IS_MODRM_REG_MODE(bRm))
2476 {
2477 /*
2478 * XMM, XMM.
2479 */
2480 IEM_MC_BEGIN(0, 0);
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2482 IEM_MC_LOCAL(RTUINT128U, uSrc);
2483
2484 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2485 IEM_MC_PREPARE_SSE_USAGE();
2486
2487 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2488 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2489 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2490 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2491 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496 else
2497 {
2498 /*
2499 * XMM, [mem128].
2500 */
2501 IEM_MC_BEGIN(0, 0);
2502 IEM_MC_LOCAL(RTUINT128U, uSrc);
2503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2504
2505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2507 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2508 IEM_MC_PREPARE_SSE_USAGE();
2509
2510 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2511 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2512 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2513 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2514 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2515
2516 IEM_MC_ADVANCE_RIP_AND_FINISH();
2517 IEM_MC_END();
2518 }
2519}
2520
2521
2522/**
2523 * @opcode 0x12
2524 * @oppfx 0xf2
2525 * @opcpuid sse3
2526 * @opgroup og_sse3_pcksclr_datamove
2527 * @opxcpttype 5
2528 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2529 * op1=0x22222222111111112222222211111111
2530 */
2531FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2532{
2533 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
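    /* Duplicates the low quadword: result = { src.qw0, src.qw0 }; the memory
       form therefore only reads 64 bits. */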
2534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2535 if (IEM_IS_MODRM_REG_MODE(bRm))
2536 {
2537 /*
2538 * XMM128, XMM64.
2539 */
2540 IEM_MC_BEGIN(0, 0);
2541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2542 IEM_MC_LOCAL(uint64_t, uSrc);
2543
2544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2545 IEM_MC_PREPARE_SSE_USAGE();
2546
2547 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2548 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2549 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2550
2551 IEM_MC_ADVANCE_RIP_AND_FINISH();
2552 IEM_MC_END();
2553 }
2554 else
2555 {
2556 /*
2557 * XMM128, [mem64].
2558 */
2559 IEM_MC_BEGIN(0, 0);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561 IEM_MC_LOCAL(uint64_t, uSrc);
2562
2563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2566 IEM_MC_PREPARE_SSE_USAGE();
2567
2568 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2570 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2571
2572 IEM_MC_ADVANCE_RIP_AND_FINISH();
2573 IEM_MC_END();
2574 }
2575}
2576
2577
2578/**
2579 * @opcode 0x13
2580 * @opcodesub !11 mr/reg
2581 * @oppfx none
2582 * @opcpuid sse
2583 * @opgroup og_sse_simdfp_datamove
2584 * @opxcpttype 5
2585 * @optest op1=1 op2=2 -> op1=2
2586 * @optest op1=0 op2=-42 -> op1=-42
2587 */
2588FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2589{
2590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2591 if (IEM_IS_MODRM_MEM_MODE(bRm))
2592 {
2593 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2594
2595 IEM_MC_BEGIN(0, 0);
2596 IEM_MC_LOCAL(uint64_t, uSrc);
2597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2598
2599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2601 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2603
2604 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2605 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2606
2607 IEM_MC_ADVANCE_RIP_AND_FINISH();
2608 IEM_MC_END();
2609 }
2610
2611 /**
2612 * @opdone
2613 * @opmnemonic ud0f13m3
2614 * @opcode 0x13
2615 * @opcodesub 11 mr/reg
2616 * @oppfx none
2617 * @opunused immediate
2618 * @opcpuid sse
2619 * @optest ->
2620 */
2621 else
2622 IEMOP_RAISE_INVALID_OPCODE_RET();
2623}
2624
2625
2626/**
2627 * @opcode 0x13
2628 * @opcodesub !11 mr/reg
2629 * @oppfx 0x66
2630 * @opcpuid sse2
2631 * @opgroup og_sse2_pcksclr_datamove
2632 * @opxcpttype 5
2633 * @optest op1=1 op2=2 -> op1=2
2634 * @optest op1=0 op2=-42 -> op1=-42
2635 */
2636FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2637{
2638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2639 if (IEM_IS_MODRM_MEM_MODE(bRm))
2640 {
2641 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2642
2643 IEM_MC_BEGIN(0, 0);
2644 IEM_MC_LOCAL(uint64_t, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2649 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2651
2652 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2653 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658
2659 /**
2660 * @opdone
2661 * @opmnemonic ud660f13m3
2662 * @opcode 0x13
2663 * @opcodesub 11 mr/reg
2664 * @oppfx 0x66
2665 * @opunused immediate
2666 * @opcpuid sse
2667 * @optest ->
2668 */
2669 else
2670 IEMOP_RAISE_INVALID_OPCODE_RET();
2671}
2672
2673
2674/**
2675 * @opmnemonic udf30f13
2676 * @opcode 0x13
2677 * @oppfx 0xf3
2678 * @opunused intel-modrm
2679 * @opcpuid sse
2680 * @optest ->
2681 * @opdone
2682 */
2683
2684/**
2685 * @opmnemonic udf20f13
2686 * @opcode 0x13
2687 * @oppfx 0xf2
2688 * @opunused intel-modrm
2689 * @opcpuid sse
2690 * @optest ->
2691 * @opdone
2692 */
2693
2694/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2695FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2696{
2697 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2698 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2699}
2700
2701
2702/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2703FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2704{
2705 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2706 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2707}
2708
2709
2710/**
2711 * @opdone
2712 * @opmnemonic udf30f14
2713 * @opcode 0x14
2714 * @oppfx 0xf3
2715 * @opunused intel-modrm
2716 * @opcpuid sse
2717 * @optest ->
2718 * @opdone
2719 */
2720
2721/**
2722 * @opmnemonic udf20f14
2723 * @opcode 0x14
2724 * @oppfx 0xf2
2725 * @opunused intel-modrm
2726 * @opcpuid sse
2727 * @optest ->
2728 * @opdone
2729 */
2730
2731/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2732FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2733{
2734 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2735 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2736}
2737
2738
2739/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2740FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2741{
2742 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2743 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2744}
2745
2746
2747/* Opcode 0xf3 0x0f 0x15 - invalid */
2748/* Opcode 0xf2 0x0f 0x15 - invalid */
2749
2750/**
2751 * @opdone
2752 * @opmnemonic udf30f15
2753 * @opcode 0x15
2754 * @oppfx 0xf3
2755 * @opunused intel-modrm
2756 * @opcpuid sse
2757 * @optest ->
2758 * @opdone
2759 */
2760
2761/**
2762 * @opmnemonic udf20f15
2763 * @opcode 0x15
2764 * @oppfx 0xf2
2765 * @opunused intel-modrm
2766 * @opcpuid sse
2767 * @optest ->
2768 * @opdone
2769 */
2770
2771FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2772{
2773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2774 if (IEM_IS_MODRM_REG_MODE(bRm))
2775 {
2776 /**
2777 * @opcode 0x16
2778 * @opcodesub 11 mr/reg
2779 * @oppfx none
2780 * @opcpuid sse
2781 * @opgroup og_sse_simdfp_datamove
2782 * @opxcpttype 5
2783 * @optest op1=1 op2=2 -> op1=2
2784 * @optest op1=0 op2=-42 -> op1=-42
2785 */
2786 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
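    /* Copies the low quadword of the source register into the high quadword
       of the destination; the destination's low quadword is preserved. */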
2787
2788 IEM_MC_BEGIN(0, 0);
2789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2790 IEM_MC_LOCAL(uint64_t, uSrc);
2791
2792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2795 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2796
2797 IEM_MC_ADVANCE_RIP_AND_FINISH();
2798 IEM_MC_END();
2799 }
2800 else
2801 {
2802 /**
2803 * @opdone
2804 * @opcode 0x16
2805 * @opcodesub !11 mr/reg
2806 * @oppfx none
2807 * @opcpuid sse
2808 * @opgroup og_sse_simdfp_datamove
2809 * @opxcpttype 5
2810 * @optest op1=1 op2=2 -> op1=2
2811 * @optest op1=0 op2=-42 -> op1=-42
2812 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2813 */
2814 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2815
2816 IEM_MC_BEGIN(0, 0);
2817 IEM_MC_LOCAL(uint64_t, uSrc);
2818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2819
2820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2824
2825 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2826 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2827
2828 IEM_MC_ADVANCE_RIP_AND_FINISH();
2829 IEM_MC_END();
2830 }
2831}
2832
2833
2834/**
2835 * @opcode 0x16
2836 * @opcodesub !11 mr/reg
2837 * @oppfx 0x66
2838 * @opcpuid sse2
2839 * @opgroup og_sse2_pcksclr_datamove
2840 * @opxcpttype 5
2841 * @optest op1=1 op2=2 -> op1=2
2842 * @optest op1=0 op2=-42 -> op1=-42
2843 */
2844FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2845{
2846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2847 if (IEM_IS_MODRM_MEM_MODE(bRm))
2848 {
2849 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2850
2851 IEM_MC_BEGIN(0, 0);
2852 IEM_MC_LOCAL(uint64_t, uSrc);
2853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2854
2855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2857 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2858 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2859
2860 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2861 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2862
2863 IEM_MC_ADVANCE_RIP_AND_FINISH();
2864 IEM_MC_END();
2865 }
2866
2867 /**
2868 * @opdone
2869 * @opmnemonic ud660f16m3
2870 * @opcode 0x16
2871 * @opcodesub 11 mr/reg
2872 * @oppfx 0x66
2873 * @opunused immediate
2874 * @opcpuid sse
2875 * @optest ->
2876 */
2877 else
2878 IEMOP_RAISE_INVALID_OPCODE_RET();
2879}
2880
2881
2882/**
2883 * @opcode 0x16
2884 * @oppfx 0xf3
2885 * @opcpuid sse3
2886 * @opgroup og_sse3_pcksclr_datamove
2887 * @opxcpttype 4
2888 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2889 * op1=0x00000002000000020000000100000001
2890 */
2891FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2892{
2893 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
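    /* Duplicates the odd-indexed dwords: result = { src1, src1, src3, src3 }. */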
2894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2895 if (IEM_IS_MODRM_REG_MODE(bRm))
2896 {
2897 /*
2898 * XMM128, XMM128.
2899 */
2900 IEM_MC_BEGIN(0, 0);
2901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2902 IEM_MC_LOCAL(RTUINT128U, uSrc);
2903
2904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2905 IEM_MC_PREPARE_SSE_USAGE();
2906
2907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2908 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2909 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2910 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2911 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2912
2913 IEM_MC_ADVANCE_RIP_AND_FINISH();
2914 IEM_MC_END();
2915 }
2916 else
2917 {
2918 /*
2919 * XMM128, [mem128].
2920 */
2921 IEM_MC_BEGIN(0, 0);
2922 IEM_MC_LOCAL(RTUINT128U, uSrc);
2923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2924
2925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2928 IEM_MC_PREPARE_SSE_USAGE();
2929
2930 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2931 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2932 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2933 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2934 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2935
2936 IEM_MC_ADVANCE_RIP_AND_FINISH();
2937 IEM_MC_END();
2938 }
2939}
2940
2941/**
2942 * @opdone
2943 * @opmnemonic udf20f16
2944 * @opcode 0x16
2945 * @oppfx 0xf2
2946 * @opunused intel-modrm
2947 * @opcpuid sse
2948 * @optest ->
2949 * @opdone
2950 */
2951
2952
2953/**
2954 * @opcode 0x17
2955 * @opcodesub !11 mr/reg
2956 * @oppfx none
2957 * @opcpuid sse
2958 * @opgroup og_sse_simdfp_datamove
2959 * @opxcpttype 5
2960 * @optest op1=1 op2=2 -> op1=2
2961 * @optest op1=0 op2=-42 -> op1=-42
2962 */
2963FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2964{
2965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2966 if (IEM_IS_MODRM_MEM_MODE(bRm))
2967 {
2968 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2969
2970 IEM_MC_BEGIN(0, 0);
2971 IEM_MC_LOCAL(uint64_t, uSrc);
2972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2973
2974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2978
2979 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
2980 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2981
2982 IEM_MC_ADVANCE_RIP_AND_FINISH();
2983 IEM_MC_END();
2984 }
2985
2986 /**
2987 * @opdone
2988 * @opmnemonic ud0f17m3
2989 * @opcode 0x17
2990 * @opcodesub 11 mr/reg
2991 * @oppfx none
2992 * @opunused immediate
2993 * @opcpuid sse
2994 * @optest ->
2995 */
2996 else
2997 IEMOP_RAISE_INVALID_OPCODE_RET();
2998}
2999
3000
3001/**
3002 * @opcode 0x17
3003 * @opcodesub !11 mr/reg
3004 * @oppfx 0x66
3005 * @opcpuid sse2
3006 * @opgroup og_sse2_pcksclr_datamove
3007 * @opxcpttype 5
3008 * @optest op1=1 op2=2 -> op1=2
3009 * @optest op1=0 op2=-42 -> op1=-42
3010 */
3011FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3012{
3013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3014 if (IEM_IS_MODRM_MEM_MODE(bRm))
3015 {
3016 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3017
3018 IEM_MC_BEGIN(0, 0);
3019 IEM_MC_LOCAL(uint64_t, uSrc);
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3021
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* SSE2, like iemOp_movlpd_Mq_Vq (see @opcpuid above). */
3024 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3025 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3026
3027 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3028 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3029
3030 IEM_MC_ADVANCE_RIP_AND_FINISH();
3031 IEM_MC_END();
3032 }
3033
3034 /**
3035 * @opdone
3036 * @opmnemonic ud660f17m3
3037 * @opcode 0x17
3038 * @opcodesub 11 mr/reg
3039 * @oppfx 0x66
3040 * @opunused immediate
3041 * @opcpuid sse
3042 * @optest ->
3043 */
3044 else
3045 IEMOP_RAISE_INVALID_OPCODE_RET();
3046}
3047
3048
3049/**
3050 * @opdone
3051 * @opmnemonic udf30f17
3052 * @opcode 0x17
3053 * @oppfx 0xf3
3054 * @opunused intel-modrm
3055 * @opcpuid sse
3056 * @optest ->
3057 * @opdone
3058 */
3059
3060/**
3061 * @opmnemonic udf20f17
3062 * @opcode 0x17
3063 * @oppfx 0xf2
3064 * @opunused intel-modrm
3065 * @opcpuid sse
3066 * @optest ->
3067 * @opdone
3068 */
3069
3070
3071/** Opcode 0x0f 0x18. */
3072FNIEMOP_DEF(iemOp_prefetch_Grp16)
3073{
3074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3075 if (IEM_IS_MODRM_MEM_MODE(bRm))
3076 {
3077 switch (IEM_GET_MODRM_REG_8(bRm))
3078 {
3079 case 4: /* Aliased to /0 for the time being according to AMD. */
3080 case 5: /* Aliased to /0 for the time being according to AMD. */
3081 case 6: /* Aliased to /0 for the time being according to AMD. */
3082 case 7: /* Aliased to /0 for the time being according to AMD. */
3083 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3084 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3085 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3086 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3088 }
3089
3090 IEM_MC_BEGIN(0, 0);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094 /* Currently a NOP. */
3095 IEM_MC_NOREF(GCPtrEffSrc);
3096 IEM_MC_ADVANCE_RIP_AND_FINISH();
3097 IEM_MC_END();
3098 }
3099 else
3100 IEMOP_RAISE_INVALID_OPCODE_RET();
3101}
3102
3103
3104/** Opcode 0x0f 0x19..0x1f. */
3105FNIEMOP_DEF(iemOp_nop_Ev)
3106{
3107 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
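    /* Reserved-NOP range: the ModR/M byte (and any effective address) is
       decoded, but the operand itself is never accessed. */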
3108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3109 if (IEM_IS_MODRM_REG_MODE(bRm))
3110 {
3111 IEM_MC_BEGIN(0, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_ADVANCE_RIP_AND_FINISH();
3114 IEM_MC_END();
3115 }
3116 else
3117 {
3118 IEM_MC_BEGIN(0, 0);
3119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3122 /* Currently a NOP. */
3123 IEM_MC_NOREF(GCPtrEffSrc);
3124 IEM_MC_ADVANCE_RIP_AND_FINISH();
3125 IEM_MC_END();
3126 }
3127}
3128
3129
3130/** Opcode 0x0f 0x20. */
3131FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3132{
3133 /* mod is ignored, as are operand-size overrides. */
3134/** @todo testcase: check memory encoding. */
3135 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3136 IEMOP_HLP_MIN_386();
3137 if (IEM_IS_64BIT_CODE(pVCpu))
3138 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3139 else
3140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3141
3142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3143 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3144 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3145 {
3146 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3148 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3149 iCrReg |= 8;
3150 }
3151 switch (iCrReg)
3152 {
3153 case 0: case 2: case 3: case 4: case 8:
3154 break;
3155 default:
3156 IEMOP_RAISE_INVALID_OPCODE_RET();
3157 }
3158 IEMOP_HLP_DONE_DECODING();
3159
3160 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3161 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3162 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3163}
3164
3165
3166/** Opcode 0x0f 0x21. */
3167FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3168{
3169/** @todo testcase: check memory encoding. */
3170 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3171 IEMOP_HLP_MIN_386();
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
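    /* REX.R would encode DR8..DR15, which do not exist, hence the #UD below. */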
3174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3175 IEMOP_RAISE_INVALID_OPCODE_RET();
3176 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3177 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3178 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3179}
3180
3181
3182/** Opcode 0x0f 0x22. */
3183FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3184{
3185 /* mod is ignored, as are operand-size overrides. */
3186 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3187 IEMOP_HLP_MIN_386();
3188 if (IEM_IS_64BIT_CODE(pVCpu))
3189 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3190 else
3191 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3192
3193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3194 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3195 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3196 {
3197 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3198 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3199 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3200 iCrReg |= 8;
3201 }
3202 switch (iCrReg)
3203 {
3204 case 0: case 2: case 3: case 4: case 8:
3205 break;
3206 default:
3207 IEMOP_RAISE_INVALID_OPCODE_RET();
3208 }
3209 IEMOP_HLP_DONE_DECODING();
3210
3211 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3212 if (iCrReg & (2 | 8))
3213 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3214 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3215 else
3216 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3217 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3218}
3219
3220
3221/** Opcode 0x0f 0x23. */
3222FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3223{
3224 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3225 IEMOP_HLP_MIN_386();
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3229 IEMOP_RAISE_INVALID_OPCODE_RET();
3230 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3231 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3232}
3233
3234
3235/** Opcode 0x0f 0x24. */
3236FNIEMOP_DEF(iemOp_mov_Rd_Td)
3237{
3238 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3239 IEMOP_HLP_MIN_386();
3240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
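    /* The test registers (TR3..TR7) only exist on the 386 and 486; Pentium
       and later CPUs raise #UD for these encodings. */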
3242 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3243 IEMOP_RAISE_INVALID_OPCODE_RET();
3244 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3245 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3246}
3247
3248
3249/** Opcode 0x0f 0x26. */
3250FNIEMOP_DEF(iemOp_mov_Td_Rd)
3251{
3252 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3253 IEMOP_HLP_MIN_386();
3254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3257 IEMOP_RAISE_INVALID_OPCODE_RET();
3258 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3259}
3260
3261
3262/**
3263 * @opcode 0x28
3264 * @oppfx none
3265 * @opcpuid sse
3266 * @opgroup og_sse_simdfp_datamove
3267 * @opxcpttype 1
3268 * @optest op1=1 op2=2 -> op1=2
3269 * @optest op1=0 op2=-42 -> op1=-42
3270 */
3271FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3272{
3273 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3275 if (IEM_IS_MODRM_REG_MODE(bRm))
3276 {
3277 /*
3278 * Register, register.
3279 */
3280 IEM_MC_BEGIN(0, 0);
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3282 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3284 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3285 IEM_GET_MODRM_RM(pVCpu, bRm));
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /*
3292 * Register, memory.
3293 */
3294 IEM_MC_BEGIN(0, 0);
3295 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3300 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3302
3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3304 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3305
3306 IEM_MC_ADVANCE_RIP_AND_FINISH();
3307 IEM_MC_END();
3308 }
3309}
3310
3311/**
3312 * @opcode 0x28
3313 * @oppfx 66
3314 * @opcpuid sse2
3315 * @opgroup og_sse2_pcksclr_datamove
3316 * @opxcpttype 1
3317 * @optest op1=1 op2=2 -> op1=2
3318 * @optest op1=0 op2=-42 -> op1=-42
3319 */
3320FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3321{
3322 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3324 if (IEM_IS_MODRM_REG_MODE(bRm))
3325 {
3326 /*
3327 * Register, register.
3328 */
3329 IEM_MC_BEGIN(0, 0);
3330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3333 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3334 IEM_GET_MODRM_RM(pVCpu, bRm));
3335 IEM_MC_ADVANCE_RIP_AND_FINISH();
3336 IEM_MC_END();
3337 }
3338 else
3339 {
3340 /*
3341 * Register, memory.
3342 */
3343 IEM_MC_BEGIN(0, 0);
3344 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3346
3347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3354
3355 IEM_MC_ADVANCE_RIP_AND_FINISH();
3356 IEM_MC_END();
3357 }
3358}
3359
3360/* Opcode 0xf3 0x0f 0x28 - invalid */
3361/* Opcode 0xf2 0x0f 0x28 - invalid */
3362
3363/**
3364 * @opcode 0x29
3365 * @oppfx none
3366 * @opcpuid sse
3367 * @opgroup og_sse_simdfp_datamove
3368 * @opxcpttype 1
3369 * @optest op1=1 op2=2 -> op1=2
3370 * @optest op1=0 op2=-42 -> op1=-42
3371 */
3372FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3373{
3374 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 if (IEM_IS_MODRM_REG_MODE(bRm))
3377 {
3378 /*
3379 * Register, register.
3380 */
3381 IEM_MC_BEGIN(0, 0);
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3385 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3386 IEM_GET_MODRM_REG(pVCpu, bRm));
3387 IEM_MC_ADVANCE_RIP_AND_FINISH();
3388 IEM_MC_END();
3389 }
3390 else
3391 {
3392 /*
3393 * Memory, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3403
3404 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3405 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3406
3407 IEM_MC_ADVANCE_RIP_AND_FINISH();
3408 IEM_MC_END();
3409 }
3410}
3411
3412/**
3413 * @opcode 0x29
3414 * @oppfx 66
3415 * @opcpuid sse2
3416 * @opgroup og_sse2_pcksclr_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
3421FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3422{
3423 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3425 if (IEM_IS_MODRM_REG_MODE(bRm))
3426 {
3427 /*
3428 * Register, register.
3429 */
3430 IEM_MC_BEGIN(0, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3434 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3435 IEM_GET_MODRM_REG(pVCpu, bRm));
3436 IEM_MC_ADVANCE_RIP_AND_FINISH();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /*
3442 * Memory, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3452
3453 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3454 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3455
3456 IEM_MC_ADVANCE_RIP_AND_FINISH();
3457 IEM_MC_END();
3458 }
3459}
3460
3461/* Opcode 0xf3 0x0f 0x29 - invalid */
3462/* Opcode 0xf2 0x0f 0x29 - invalid */
3463
3464
3465/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3466FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3467{
3468 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3470 if (IEM_IS_MODRM_REG_MODE(bRm))
3471 {
3472 /*
3473 * XMM, MMX
3474 */
3475 IEM_MC_BEGIN(0, 0);
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* cvtpi2ps is an SSE instruction, SSE2 is not required. */
3477 IEM_MC_LOCAL(X86XMMREG, Dst);
3478 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3479 IEM_MC_ARG(uint64_t, u64Src, 1);
3480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3481 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3482 IEM_MC_PREPARE_FPU_USAGE();
3483 IEM_MC_FPU_TO_MMX_MODE();
3484
3485 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3486 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3487
3488 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3489 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3490 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3491
3492 IEM_MC_ADVANCE_RIP_AND_FINISH();
3493 IEM_MC_END();
3494 }
3495 else
3496 {
3497 /*
3498 * XMM, [mem64]
3499 */
3500 IEM_MC_BEGIN(0, 0);
3501 IEM_MC_LOCAL(X86XMMREG, Dst);
3502 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3503 IEM_MC_ARG(uint64_t, u64Src, 1);
3504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3505
3506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* cvtpi2ps is an SSE instruction, SSE2 is not required. */
3508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3509 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3510 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3511
3512 IEM_MC_PREPARE_FPU_USAGE();
3513 IEM_MC_FPU_TO_MMX_MODE();
3514
3515 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3516 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3517 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3518
3519 IEM_MC_ADVANCE_RIP_AND_FINISH();
3520 IEM_MC_END();
3521 }
3522}
3523
3524
3525/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3526FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3527{
3528 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3530 if (IEM_IS_MODRM_REG_MODE(bRm))
3531 {
3532 /*
3533 * XMM, MMX
3534 */
3535 IEM_MC_BEGIN(0, 0);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3537 IEM_MC_LOCAL(X86XMMREG, Dst);
3538 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3539 IEM_MC_ARG(uint64_t, u64Src, 1);
3540 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3541 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3542 IEM_MC_PREPARE_FPU_USAGE();
3543 IEM_MC_FPU_TO_MMX_MODE();
3544
3545 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3546
3547 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3548 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3549 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3550
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * XMM, [mem64]
3558 */
3559 IEM_MC_BEGIN(0, 0);
3560 IEM_MC_LOCAL(X86XMMREG, Dst);
3561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3562 IEM_MC_ARG(uint64_t, u64Src, 1);
3563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3564
3565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3568 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3569 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3570
3571 /* Doesn't cause a transition to MMX mode. */
3572 IEM_MC_PREPARE_SSE_USAGE();
3573
3574 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3575 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3576 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3577
3578 IEM_MC_ADVANCE_RIP_AND_FINISH();
3579 IEM_MC_END();
3580 }
3581}
3582
3583
3584/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3585FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3586{
3587 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3588
3589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
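 /* REX.W selects the 64-bit integer source form of Ey; without it the source is a 32-bit GPR or a dword in memory. */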
3590 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3591 {
3592 if (IEM_IS_MODRM_REG_MODE(bRm))
3593 {
3594 /* XMM, greg64 */
3595 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3596 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3597 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3598 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3599
3600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3601 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3602 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3603
3604 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3605 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3606 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3607 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3608
3609 IEM_MC_ADVANCE_RIP_AND_FINISH();
3610 IEM_MC_END();
3611 }
3612 else
3613 {
3614 /* XMM, [mem64] */
3615 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3617 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3618 IEM_MC_LOCAL(int64_t, i64Src);
3619 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3620 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3621
3622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3624 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3625 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3626
3627 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3628 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3629 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3630 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635 }
3636 else
3637 {
3638 if (IEM_IS_MODRM_REG_MODE(bRm))
3639 {
3640 /* XMM, greg32 */
3641 IEM_MC_BEGIN(0, 0);
3642 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3643 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3644 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3645
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3648 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3649
3650 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3651 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3652 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3653 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3654
3655 IEM_MC_ADVANCE_RIP_AND_FINISH();
3656 IEM_MC_END();
3657 }
3658 else
3659 {
3660 /* XMM, [mem32] */
3661 IEM_MC_BEGIN(0, 0);
3662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3663 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3664 IEM_MC_LOCAL(int32_t, i32Src);
3665 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3666 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3667
3668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3670 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3671 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3672
3673 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3674 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3675 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3676 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3677
3678 IEM_MC_ADVANCE_RIP_AND_FINISH();
3679 IEM_MC_END();
3680 }
3681 }
3682}
3683
3684
3685/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3686FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3687{
3688 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3689
3690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3691 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3692 {
3693 if (IEM_IS_MODRM_REG_MODE(bRm))
3694 {
3695 /* XMM, greg64 */
3696 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3697 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3698 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3699 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3700
3701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3702 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3703 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3704
3705 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3706 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3707 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3708 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 /* XMM, [mem64] */
3716 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3718 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3719 IEM_MC_LOCAL(int64_t, i64Src);
3720 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3721 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3722
3723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3725 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3726 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3727
3728 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3729 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3730 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3731 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3732
3733 IEM_MC_ADVANCE_RIP_AND_FINISH();
3734 IEM_MC_END();
3735 }
3736 }
3737 else
3738 {
3739 if (IEM_IS_MODRM_REG_MODE(bRm))
3740 {
3741 /* XMM, greg32 */
3742 IEM_MC_BEGIN(0, 0);
3743 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3744 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3745 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3746
3747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3748 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3749 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3750
3751 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3752 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3753 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3754 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3755
3756 IEM_MC_ADVANCE_RIP_AND_FINISH();
3757 IEM_MC_END();
3758 }
3759 else
3760 {
3761 /* XMM, [mem32] */
3762 IEM_MC_BEGIN(0, 0);
3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3764 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3765 IEM_MC_LOCAL(int32_t, i32Src);
3766 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3767 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3768
3769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3771 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3772 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3773
3774 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3775 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3776 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3777 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3778
3779 IEM_MC_ADVANCE_RIP_AND_FINISH();
3780 IEM_MC_END();
3781 }
3782 }
3783}
3784
3785
3786/**
3787 * @opcode 0x2b
3788 * @opcodesub !11 mr/reg
3789 * @oppfx none
3790 * @opcpuid sse
3791 * @opgroup og_sse1_cachect
3792 * @opxcpttype 1
3793 * @optest op1=1 op2=2 -> op1=2
3794 * @optest op1=0 op2=-42 -> op1=-42
3795 */
3796FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3797{
3798 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3800 if (IEM_IS_MODRM_MEM_MODE(bRm))
3801 {
3802 /*
3803 * memory, register.
3804 */
3805 IEM_MC_BEGIN(0, 0);
3806 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3808
3809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3812 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3813
3814 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
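 /* The non-temporal hint doesn't relax alignment checking: the _ALIGN_SSE store faults unless the address is 16-byte aligned. */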
3815 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3816
3817 IEM_MC_ADVANCE_RIP_AND_FINISH();
3818 IEM_MC_END();
3819 }
3820 /* The register, register encoding is invalid. */
3821 else
3822 IEMOP_RAISE_INVALID_OPCODE_RET();
3823}
3824
3825/**
3826 * @opcode 0x2b
3827 * @opcodesub !11 mr/reg
3828 * @oppfx 0x66
3829 * @opcpuid sse2
3830 * @opgroup og_sse2_cachect
3831 * @opxcpttype 1
3832 * @optest op1=1 op2=2 -> op1=2
3833 * @optest op1=0 op2=-42 -> op1=-42
3834 */
3835FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3836{
3837 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3839 if (IEM_IS_MODRM_MEM_MODE(bRm))
3840 {
3841 /*
3842 * memory, register.
3843 */
3844 IEM_MC_BEGIN(0, 0);
3845 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3847
3848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3850 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3851 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3852
3853 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3854 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3855
3856 IEM_MC_ADVANCE_RIP_AND_FINISH();
3857 IEM_MC_END();
3858 }
3859 /* The register, register encoding is invalid. */
3860 else
3861 IEMOP_RAISE_INVALID_OPCODE_RET();
3862}
3863/* Opcode 0xf3 0x0f 0x2b - invalid */
3864/* Opcode 0xf2 0x0f 0x2b - invalid */
3865
3866
3867/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3868FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3869{
3870 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3872 if (IEM_IS_MODRM_REG_MODE(bRm))
3873 {
3874 /*
3875 * Register, register.
3876 */
3877 IEM_MC_BEGIN(0, 0);
3878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3879 IEM_MC_LOCAL(uint64_t, u64Dst);
3880 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3881 IEM_MC_ARG(uint64_t, u64Src, 1);
3882 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3883 IEM_MC_PREPARE_FPU_USAGE();
3884 IEM_MC_FPU_TO_MMX_MODE();
3885
3886 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3887
3888 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3889 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3890 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3891
3892 IEM_MC_ADVANCE_RIP_AND_FINISH();
3893 IEM_MC_END();
3894 }
3895 else
3896 {
3897 /*
3898 * Register, memory.
3899 */
3900 IEM_MC_BEGIN(0, 0);
3901 IEM_MC_LOCAL(uint64_t, u64Dst);
3902 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3903 IEM_MC_ARG(uint64_t, u64Src, 1);
3904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3905
3906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3908 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3909 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3910
3911 IEM_MC_PREPARE_FPU_USAGE();
3912 IEM_MC_FPU_TO_MMX_MODE();
3913
3914 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3915 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3916 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3917
3918 IEM_MC_ADVANCE_RIP_AND_FINISH();
3919 IEM_MC_END();
3920 }
3921}
3922
3923
3924/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3925FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3926{
3927 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3929 if (IEM_IS_MODRM_REG_MODE(bRm))
3930 {
3931 /*
3932 * Register, register.
3933 */
3934 IEM_MC_BEGIN(0, 0);
3935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3936 IEM_MC_LOCAL(uint64_t, u64Dst);
3937 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3938 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
3939 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3940 IEM_MC_PREPARE_FPU_USAGE();
3941 IEM_MC_FPU_TO_MMX_MODE();
3942
3943 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3944
3945 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
3946 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3947 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3948
3949 IEM_MC_ADVANCE_RIP_AND_FINISH();
3950 IEM_MC_END();
3951 }
3952 else
3953 {
3954 /*
3955 * Register, memory.
3956 */
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_LOCAL(uint64_t, u64Dst);
3959 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3960 IEM_MC_LOCAL(X86XMMREG, uSrc);
3961 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
3962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3963
3964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3966 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3967 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3968
3969 IEM_MC_PREPARE_FPU_USAGE();
3970 IEM_MC_FPU_TO_MMX_MODE();
3971
3972 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
3973 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3974 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3975
3976 IEM_MC_ADVANCE_RIP_AND_FINISH();
3977 IEM_MC_END();
3978 }
3979}
3980
3981
3982/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3983FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3984{
3985 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3986
3987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
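 /* For this conversion REX.W widens the destination general register (Gy); the source stays a 32-bit float. */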
3988 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3989 {
3990 if (IEM_IS_MODRM_REG_MODE(bRm))
3991 {
3992 /* greg64, XMM */
3993 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3994 IEM_MC_LOCAL(int64_t, i64Dst);
3995 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
3996 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
3997
3998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3999 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4000 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4001
4002 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4003 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4004 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4005 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4006
4007 IEM_MC_ADVANCE_RIP_AND_FINISH();
4008 IEM_MC_END();
4009 }
4010 else
4011 {
4012 /* greg64, [mem32] */
4013 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015 IEM_MC_LOCAL(int64_t, i64Dst);
4016 IEM_MC_LOCAL(uint32_t, u32Src);
4017 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4018 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4019
4020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4022 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4023 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4024
4025 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4026 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4027 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4028 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4029
4030 IEM_MC_ADVANCE_RIP_AND_FINISH();
4031 IEM_MC_END();
4032 }
4033 }
4034 else
4035 {
4036 if (IEM_IS_MODRM_REG_MODE(bRm))
4037 {
4038 /* greg32, XMM */
4039 IEM_MC_BEGIN(0, 0);
4040 IEM_MC_LOCAL(int32_t, i32Dst);
4041 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4042 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4043
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4045 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4046 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4047
4048 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4049 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4050 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4051 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4052
4053 IEM_MC_ADVANCE_RIP_AND_FINISH();
4054 IEM_MC_END();
4055 }
4056 else
4057 {
4058 /* greg32, [mem32] */
4059 IEM_MC_BEGIN(0, 0);
4060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4061 IEM_MC_LOCAL(int32_t, i32Dst);
4062 IEM_MC_LOCAL(uint32_t, u32Src);
4063 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4064 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4065
4066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4069 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4070
4071 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4072 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4073 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4074 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4075
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 IEM_MC_END();
4078 }
4079 }
4080}
4081
4082
4083/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4084FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4085{
4086 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4087
4088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4089 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4090 {
4091 if (IEM_IS_MODRM_REG_MODE(bRm))
4092 {
4093 /* greg64, XMM */
4094 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4095 IEM_MC_LOCAL(int64_t, i64Dst);
4096 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4097 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4098
4099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4100 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4101 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4102
4103 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4104 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4105 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4106 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4107
4108 IEM_MC_ADVANCE_RIP_AND_FINISH();
4109 IEM_MC_END();
4110 }
4111 else
4112 {
4113 /* greg64, [mem64] */
4114 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4116 IEM_MC_LOCAL(int64_t, i64Dst);
4117 IEM_MC_LOCAL(uint64_t, u64Src);
4118 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4119 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4120
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4124 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4125
4126 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4127 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4128 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4129 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4130
4131 IEM_MC_ADVANCE_RIP_AND_FINISH();
4132 IEM_MC_END();
4133 }
4134 }
4135 else
4136 {
4137 if (IEM_IS_MODRM_REG_MODE(bRm))
4138 {
4139 /* greg32, XMM */
4140 IEM_MC_BEGIN(0, 0);
4141 IEM_MC_LOCAL(int32_t, i32Dst);
4142 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4143 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4144
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4146 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4147 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4148
4149 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4150 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4151 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4152 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4153
4154 IEM_MC_ADVANCE_RIP_AND_FINISH();
4155 IEM_MC_END();
4156 }
4157 else
4158 {
4159 /* greg32, [mem64] */
4160 IEM_MC_BEGIN(0, 0);
4161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4162 IEM_MC_LOCAL(int32_t, i32Dst);
4163 IEM_MC_LOCAL(uint64_t, u64Src);
4164 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4165 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4166
4167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4169 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4170 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4171
4172 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4173 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4174 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4175 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4176
4177 IEM_MC_ADVANCE_RIP_AND_FINISH();
4178 IEM_MC_END();
4179 }
4180 }
4181}
4182
4183
4184/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4185FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4186{
4187 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4189 if (IEM_IS_MODRM_REG_MODE(bRm))
4190 {
4191 /*
4192 * Register, register.
4193 */
4194 IEM_MC_BEGIN(0, 0);
4195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4196 IEM_MC_LOCAL(uint64_t, u64Dst);
4197 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4198 IEM_MC_ARG(uint64_t, u64Src, 1);
4199
4200 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4201 IEM_MC_PREPARE_FPU_USAGE();
4202 IEM_MC_FPU_TO_MMX_MODE();
4203
4204 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4205
4206 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4207 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4208 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4209
4210 IEM_MC_ADVANCE_RIP_AND_FINISH();
4211 IEM_MC_END();
4212 }
4213 else
4214 {
4215 /*
4216 * Register, memory.
4217 */
4218 IEM_MC_BEGIN(0, 0);
4219 IEM_MC_LOCAL(uint64_t, u64Dst);
4220 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4221 IEM_MC_ARG(uint64_t, u64Src, 1);
4222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4223
4224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4226 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4227 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4228
4229 IEM_MC_PREPARE_FPU_USAGE();
4230 IEM_MC_FPU_TO_MMX_MODE();
4231
4232 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4233 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4234 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4235
4236 IEM_MC_ADVANCE_RIP_AND_FINISH();
4237 IEM_MC_END();
4238 }
4239}
4240
4241
4242 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4243FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4244{
4245 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4247 if (IEM_IS_MODRM_REG_MODE(bRm))
4248 {
4249 /*
4250 * Register, register.
4251 */
4252 IEM_MC_BEGIN(0, 0);
4253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4254 IEM_MC_LOCAL(uint64_t, u64Dst);
4255 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4256 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4257
4258 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4259 IEM_MC_PREPARE_FPU_USAGE();
4260 IEM_MC_FPU_TO_MMX_MODE();
4261
4262 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4263
4264 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4265 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4266 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4267
4268 IEM_MC_ADVANCE_RIP_AND_FINISH();
4269 IEM_MC_END();
4270 }
4271 else
4272 {
4273 /*
4274 * Register, memory.
4275 */
4276 IEM_MC_BEGIN(0, 0);
4277 IEM_MC_LOCAL(uint64_t, u64Dst);
4278 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4279 IEM_MC_LOCAL(X86XMMREG, uSrc);
4280 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4282
4283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4286 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4287
4288 IEM_MC_PREPARE_FPU_USAGE();
4289 IEM_MC_FPU_TO_MMX_MODE();
4290
4291 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4292 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4293 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4294
4295 IEM_MC_ADVANCE_RIP_AND_FINISH();
4296 IEM_MC_END();
4297 }
4298}
4299
4300
4301/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4302FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4303{
4304 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4305
4306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4307 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4308 {
4309 if (IEM_IS_MODRM_REG_MODE(bRm))
4310 {
4311 /* greg64, XMM */
4312 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4313 IEM_MC_LOCAL(int64_t, i64Dst);
4314 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4315 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4316
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4320
4321 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4322 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4323 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4324 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4325
4326 IEM_MC_ADVANCE_RIP_AND_FINISH();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /* greg64, [mem32] */
4332 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4334 IEM_MC_LOCAL(int64_t, i64Dst);
4335 IEM_MC_LOCAL(uint32_t, u32Src);
4336 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4337 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4338
4339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4342 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4343
4344 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4345 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4346 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4347 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4348
4349 IEM_MC_ADVANCE_RIP_AND_FINISH();
4350 IEM_MC_END();
4351 }
4352 }
4353 else
4354 {
4355 if (IEM_IS_MODRM_REG_MODE(bRm))
4356 {
4357 /* greg32, XMM */
4358 IEM_MC_BEGIN(0, 0);
4359 IEM_MC_LOCAL(int32_t, i32Dst);
4360 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4361 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4362
4363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4364 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4365 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4366
4367 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4368 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4369 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4370 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4371
4372 IEM_MC_ADVANCE_RIP_AND_FINISH();
4373 IEM_MC_END();
4374 }
4375 else
4376 {
4377 /* greg32, [mem32] */
4378 IEM_MC_BEGIN(0, 0);
4379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4380 IEM_MC_LOCAL(int32_t, i32Dst);
4381 IEM_MC_LOCAL(uint32_t, u32Src);
4382 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4383 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4384
4385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4388 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4389
4390 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4391 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4392 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4393 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4394
4395 IEM_MC_ADVANCE_RIP_AND_FINISH();
4396 IEM_MC_END();
4397 }
4398 }
4399}
4400
4401
4402/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4403FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4404{
4405 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4406
4407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4408 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4409 {
4410 if (IEM_IS_MODRM_REG_MODE(bRm))
4411 {
4412 /* greg64, XMM */
4413 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4414 IEM_MC_LOCAL(int64_t, i64Dst);
4415 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4416 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4417
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4420 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4421
4422 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4423 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4424 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4425 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4426
4427 IEM_MC_ADVANCE_RIP_AND_FINISH();
4428 IEM_MC_END();
4429 }
4430 else
4431 {
4432 /* greg64, [mem64] */
4433 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4435 IEM_MC_LOCAL(int64_t, i64Dst);
4436 IEM_MC_LOCAL(uint64_t, u64Src);
4437 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4438 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4439
4440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4442 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4443 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4444
4445 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4446 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4447 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4448 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4449
4450 IEM_MC_ADVANCE_RIP_AND_FINISH();
4451 IEM_MC_END();
4452 }
4453 }
4454 else
4455 {
4456 if (IEM_IS_MODRM_REG_MODE(bRm))
4457 {
4458 /* greg32, XMM */
4459 IEM_MC_BEGIN(0, 0);
4460 IEM_MC_LOCAL(int32_t, i32Dst);
4461 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4462 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4463
4464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4465 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4466 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4467
4468 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4469 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4470 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4471 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4472
4473 IEM_MC_ADVANCE_RIP_AND_FINISH();
4474 IEM_MC_END();
4475 }
4476 else
4477 {
4478 /* greg32, [mem64] */
4479 IEM_MC_BEGIN(0, 0);
4480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4481 IEM_MC_LOCAL(int32_t, i32Dst);
4482 IEM_MC_LOCAL(uint64_t, u64Src);
4483 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4484 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4485
4486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4489 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4490
4491 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4492 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4493 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4494 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4495
4496 IEM_MC_ADVANCE_RIP_AND_FINISH();
4497 IEM_MC_END();
4498 }
4499 }
4500}
4501
4502
4503/**
4504 * @opcode 0x2e
4505 * @oppfx none
4506 * @opflmodify cf,pf,af,zf,sf,of
4507 * @opflclear af,sf,of
4508 */
4509FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4510{
4511 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4513 if (IEM_IS_MODRM_REG_MODE(bRm))
4514 {
4515 /*
4516 * Register, register.
4517 */
4518 IEM_MC_BEGIN(0, 0);
4519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4520 IEM_MC_LOCAL(uint32_t, fEFlags);
4521 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4522 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4523 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4525 IEM_MC_PREPARE_SSE_USAGE();
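 /* The helper updates ZF/PF/CF in the EFLAGS argument and clears AF/SF/OF, per the @opflmodify/@opflclear tags above. */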
4526 IEM_MC_FETCH_EFLAGS(fEFlags);
4527 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4528 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4529 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4530 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4531 IEM_MC_COMMIT_EFLAGS(fEFlags);
4532
4533 IEM_MC_ADVANCE_RIP_AND_FINISH();
4534 IEM_MC_END();
4535 }
4536 else
4537 {
4538 /*
4539 * Register, memory.
4540 */
4541 IEM_MC_BEGIN(0, 0);
4542 IEM_MC_LOCAL(uint32_t, fEFlags);
4543 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4544 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4545 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4547
4548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4551 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4552
4553 IEM_MC_PREPARE_SSE_USAGE();
4554 IEM_MC_FETCH_EFLAGS(fEFlags);
4555 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4556 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4557 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4558 IEM_MC_COMMIT_EFLAGS(fEFlags);
4559
4560 IEM_MC_ADVANCE_RIP_AND_FINISH();
4561 IEM_MC_END();
4562 }
4563}
4564
4565
4566/**
4567 * @opcode 0x2e
4568 * @oppfx 0x66
4569 * @opflmodify cf,pf,af,zf,sf,of
4570 * @opflclear af,sf,of
4571 */
4572FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4573{
4574 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576 if (IEM_IS_MODRM_REG_MODE(bRm))
4577 {
4578 /*
4579 * Register, register.
4580 */
4581 IEM_MC_BEGIN(0, 0);
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4583 IEM_MC_LOCAL(uint32_t, fEFlags);
4584 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4585 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4586 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4588 IEM_MC_PREPARE_SSE_USAGE();
4589 IEM_MC_FETCH_EFLAGS(fEFlags);
4590 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4591 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4592 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4594 IEM_MC_COMMIT_EFLAGS(fEFlags);
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /*
4602 * Register, memory.
4603 */
4604 IEM_MC_BEGIN(0, 0);
4605 IEM_MC_LOCAL(uint32_t, fEFlags);
4606 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4607 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4608 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4610
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4614 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4615
4616 IEM_MC_PREPARE_SSE_USAGE();
4617 IEM_MC_FETCH_EFLAGS(fEFlags);
4618 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4619 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4620 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4621 IEM_MC_COMMIT_EFLAGS(fEFlags);
4622
4623 IEM_MC_ADVANCE_RIP_AND_FINISH();
4624 IEM_MC_END();
4625 }
4626}
4627
4628
4629/* Opcode 0xf3 0x0f 0x2e - invalid */
4630/* Opcode 0xf2 0x0f 0x2e - invalid */
4631
4632
4633/**
4634 * @opcode 0x2f
4635 * @oppfx none
4636 * @opflmodify cf,pf,af,zf,sf,of
4637 * @opflclear af,sf,of
4638 */
4639FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4640{
4641 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4643 if (IEM_IS_MODRM_REG_MODE(bRm))
4644 {
4645 /*
4646 * Register, register.
4647 */
4648 IEM_MC_BEGIN(0, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4650 IEM_MC_LOCAL(uint32_t, fEFlags);
4651 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4652 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4653 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4655 IEM_MC_PREPARE_SSE_USAGE();
4656 IEM_MC_FETCH_EFLAGS(fEFlags);
4657 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4658 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4659 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4660 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4661 IEM_MC_COMMIT_EFLAGS(fEFlags);
4662
4663 IEM_MC_ADVANCE_RIP_AND_FINISH();
4664 IEM_MC_END();
4665 }
4666 else
4667 {
4668 /*
4669 * Register, memory.
4670 */
4671 IEM_MC_BEGIN(0, 0);
4672 IEM_MC_LOCAL(uint32_t, fEFlags);
4673 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4674 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4675 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4677
4678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4681 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4682
4683 IEM_MC_PREPARE_SSE_USAGE();
4684 IEM_MC_FETCH_EFLAGS(fEFlags);
4685 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4686 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4687 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4688 IEM_MC_COMMIT_EFLAGS(fEFlags);
4689
4690 IEM_MC_ADVANCE_RIP_AND_FINISH();
4691 IEM_MC_END();
4692 }
4693}
4694
4695
4696/**
4697 * @opcode 0x2f
4698 * @oppfx 0x66
4699 * @opflmodify cf,pf,af,zf,sf,of
4700 * @opflclear af,sf,of
4701 */
4702FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4703{
4704 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4706 if (IEM_IS_MODRM_REG_MODE(bRm))
4707 {
4708 /*
4709 * Register, register.
4710 */
4711 IEM_MC_BEGIN(0, 0);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4713 IEM_MC_LOCAL(uint32_t, fEFlags);
4714 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4715 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4716 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4717 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4718 IEM_MC_PREPARE_SSE_USAGE();
4719 IEM_MC_FETCH_EFLAGS(fEFlags);
4720 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4721 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4722 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4723 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4724 IEM_MC_COMMIT_EFLAGS(fEFlags);
4725
4726 IEM_MC_ADVANCE_RIP_AND_FINISH();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /*
4732 * Register, memory.
4733 */
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_LOCAL(uint32_t, fEFlags);
4736 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4737 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4738 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4740
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4743 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4744 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4745
4746 IEM_MC_PREPARE_SSE_USAGE();
4747 IEM_MC_FETCH_EFLAGS(fEFlags);
4748 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4749 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4750 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4751 IEM_MC_COMMIT_EFLAGS(fEFlags);
4752
4753 IEM_MC_ADVANCE_RIP_AND_FINISH();
4754 IEM_MC_END();
4755 }
4756}
4757
4758
4759/* Opcode 0xf3 0x0f 0x2f - invalid */
4760/* Opcode 0xf2 0x0f 0x2f - invalid */
4761
4762/** Opcode 0x0f 0x30. */
4763FNIEMOP_DEF(iemOp_wrmsr)
4764{
4765 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4768}
4769
4770
4771/** Opcode 0x0f 0x31. */
4772FNIEMOP_DEF(iemOp_rdtsc)
4773{
4774 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
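 /* The RT_BIT_64 mask marks RAX and RDX as modified by the C implementation so stale shadow copies get flushed. */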
4776 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4777 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4778 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4779 iemCImpl_rdtsc);
4780}
4781
4782
4783 /** Opcode 0x0f 0x32. */
4784FNIEMOP_DEF(iemOp_rdmsr)
4785{
4786 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4788 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4791 iemCImpl_rdmsr);
4792}
4793
4794
4795 /** Opcode 0x0f 0x33. */
4796FNIEMOP_DEF(iemOp_rdpmc)
4797{
4798 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4801 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4803 iemCImpl_rdpmc);
4804}
4805
4806
4807/** Opcode 0x0f 0x34. */
4808FNIEMOP_DEF(iemOp_sysenter)
4809{
4810 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4813 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4814 iemCImpl_sysenter);
4815}
4816
4817/** Opcode 0x0f 0x35. */
4818FNIEMOP_DEF(iemOp_sysexit)
4819{
4820 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4823 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4824 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4825}
4826
4827/** Opcode 0x0f 0x37. */
4828FNIEMOP_STUB(iemOp_getsec);
4829
4830
4831/** Opcode 0x0f 0x38. */
4832FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4833{
4834#ifdef IEM_WITH_THREE_0F_38
4835 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4836 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4837#else
4838 IEMOP_BITCH_ABOUT_STUB();
4839 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4840#endif
4841}
4842
4843
4844/** Opcode 0x0f 0x3a. */
4845FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4846{
4847#ifdef IEM_WITH_THREE_0F_3A
4848 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4849 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4850#else
4851 IEMOP_BITCH_ABOUT_STUB();
4852 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4853#endif
4854}
4855
4856
4857/**
4858 * Implements a conditional move.
4859 *
4860 * Wish there was an obvious way to do this where we could share and reduce
4861 * code bloat.
4862 *
4863 * @param a_Cnd The conditional "microcode" operation.
4864 */
4865#define CMOV_X(a_Cnd) \
4866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4867 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4868 { \
4869 switch (pVCpu->iem.s.enmEffOpSize) \
4870 { \
4871 case IEMMODE_16BIT: \
4872 IEM_MC_BEGIN(0, 0); \
4873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4874 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4875 a_Cnd { \
4876 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4877 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4878 } IEM_MC_ENDIF(); \
4879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4880 IEM_MC_END(); \
4881 break; \
4882 \
4883 case IEMMODE_32BIT: \
4884 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4886 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4887 a_Cnd { \
4888 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4889 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4890 } IEM_MC_ELSE() { \
4891 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4892 } IEM_MC_ENDIF(); \
4893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4894 IEM_MC_END(); \
4895 break; \
4896 \
4897 case IEMMODE_64BIT: \
4898 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4900 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4901 a_Cnd { \
4902 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4903 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4904 } IEM_MC_ENDIF(); \
4905 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4906 IEM_MC_END(); \
4907 break; \
4908 \
4909 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4910 } \
4911 } \
4912 else \
4913 { \
4914 switch (pVCpu->iem.s.enmEffOpSize) \
4915 { \
4916 case IEMMODE_16BIT: \
4917 IEM_MC_BEGIN(0, 0); \
4918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4919 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4922 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4923 a_Cnd { \
4924 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4925 } IEM_MC_ENDIF(); \
4926 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4927 IEM_MC_END(); \
4928 break; \
4929 \
4930 case IEMMODE_32BIT: \
4931 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4933 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4936 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4937 a_Cnd { \
4938 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4939 } IEM_MC_ELSE() { \
4940 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4941 } IEM_MC_ENDIF(); \
4942 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4943 IEM_MC_END(); \
4944 break; \
4945 \
4946 case IEMMODE_64BIT: \
4947 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4949 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4952 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4953 a_Cnd { \
4954 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4955 } IEM_MC_ENDIF(); \
4956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4957 IEM_MC_END(); \
4958 break; \
4959 \
4960 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4961 } \
4962 } do {} while (0)
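/* Note that the 32-bit cases above clear the high half of the destination via IEM_MC_ELSE even when the condition is
   false, matching the unconditional zero-extension CMOVcc performs in 64-bit mode. */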
4963
4964
4965
4966/**
4967 * @opcode 0x40
4968 * @opfltest of
4969 */
4970FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4971{
4972 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4973 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4974}
4975
4976
4977/**
4978 * @opcode 0x41
4979 * @opfltest of
4980 */
4981FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4982{
4983 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4984 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4985}
4986
4987
4988/**
4989 * @opcode 0x42
4990 * @opfltest cf
4991 */
4992FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4993{
4994 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4995 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4996}
4997
4998
4999/**
5000 * @opcode 0x43
5001 * @opfltest cf
5002 */
5003FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5004{
5005 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5006 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5007}
5008
5009
5010/**
5011 * @opcode 0x44
5012 * @opfltest zf
5013 */
5014FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5015{
5016 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5017 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5018}
5019
5020
5021/**
5022 * @opcode 0x45
5023 * @opfltest zf
5024 */
5025FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5026{
5027 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5028 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5029}
5030
5031
5032/**
5033 * @opcode 0x46
5034 * @opfltest cf,zf
5035 */
5036FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5037{
5038 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5039 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5040}
5041
5042
5043/**
5044 * @opcode 0x47
5045 * @opfltest cf,zf
5046 */
5047FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5048{
5049 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5050 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5051}
5052
5053
5054/**
5055 * @opcode 0x48
5056 * @opfltest sf
5057 */
5058FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5059{
5060 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5061 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5062}
5063
5064
5065/**
5066 * @opcode 0x49
5067 * @opfltest sf
5068 */
5069FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5070{
5071 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5072 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5073}
5074
5075
5076/**
5077 * @opcode 0x4a
5078 * @opfltest pf
5079 */
5080FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5081{
5082 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5083 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5084}
5085
5086
5087/**
5088 * @opcode 0x4b
5089 * @opfltest pf
5090 */
5091FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5092{
5093 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5094 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5095}
5096
5097
5098/**
5099 * @opcode 0x4c
5100 * @opfltest sf,of
5101 */
5102FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5103{
5104 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5105 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5106}
5107
5108
5109/**
5110 * @opcode 0x4d
5111 * @opfltest sf,of
5112 */
5113FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5114{
5115 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5116 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5117}
5118
5119
5120/**
5121 * @opcode 0x4e
5122 * @opfltest zf,sf,of
5123 */
5124FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5125{
5126 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5127 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5128}
5129
5130
5131/**
5132 * @opcode 0x4f
5133 * @opfltest zf,sf,of
5134 */
5135FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5136{
5137 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5138 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5139}
5140
5141#undef CMOV_X
5142
5143/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5144FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5145{
5146 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5148 if (IEM_IS_MODRM_REG_MODE(bRm))
5149 {
5150 /*
5151 * Register, register.
5152 */
5153 IEM_MC_BEGIN(0, 0);
5154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5155 IEM_MC_LOCAL(uint8_t, u8Dst);
5156 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5157 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5158 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5159 IEM_MC_PREPARE_SSE_USAGE();
5160 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5161 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
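 /* The helper produces only the 4 sign bits; the U32 store zero-extends them into the full destination register. */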
5162 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5163 IEM_MC_ADVANCE_RIP_AND_FINISH();
5164 IEM_MC_END();
5165 }
5166 /* No memory operand. */
5167 else
5168 IEMOP_RAISE_INVALID_OPCODE_RET();
5169}
5170
5171
5172/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5173FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5174{
5175 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5177 if (IEM_IS_MODRM_REG_MODE(bRm))
5178 {
5179 /*
5180 * Register, register.
5181 */
5182 IEM_MC_BEGIN(0, 0);
5183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5184 IEM_MC_LOCAL(uint8_t, u8Dst);
5185 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5186 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5188 IEM_MC_PREPARE_SSE_USAGE();
5189 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5190 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5191 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5192 IEM_MC_ADVANCE_RIP_AND_FINISH();
5193 IEM_MC_END();
5194 }
5195 /* No memory operand. */
5196 else
5197 IEMOP_RAISE_INVALID_OPCODE_RET();
5198
5199}
5200
5201
5202/* Opcode 0xf3 0x0f 0x50 - invalid */
5203/* Opcode 0xf2 0x0f 0x50 - invalid */
5204
5205
5206/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5207FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5208{
5209 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5210 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5211}
5212
5213
5214/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5215FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5216{
5217 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5218 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5219}
5220
5221
5222/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5223FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5224{
5225 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5226 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5227}
5228
5229
5230/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5231FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5232{
5233 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5234 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5235}
5236
5237
5238/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5239FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5240{
5241 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5242 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5243}
5244
5245
5246/* Opcode 0x66 0x0f 0x52 - invalid */
5247
5248
5249/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5250FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5251{
5252 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5253 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5254}
5255
5256
5257/* Opcode 0xf2 0x0f 0x52 - invalid */
5258
5259
5260/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5261FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5262{
5263 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5264 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5265}
5266
5267
5268/* Opcode 0x66 0x0f 0x53 - invalid */
5269
5270
5271/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5272FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5273{
5274 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5275 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5276}
5277
5278
5279/* Opcode 0xf2 0x0f 0x53 - invalid */
5280
5281
5282/** Opcode 0x0f 0x54 - andps Vps, Wps */
5283FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5284{
5285 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5286 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pand_u128);
5287}
5288
5289
5290/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5291FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5292{
5293 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5294 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
5295}
5296
5297
5298/* Opcode 0xf3 0x0f 0x54 - invalid */
5299/* Opcode 0xf2 0x0f 0x54 - invalid */
5300
5301
5302/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5303FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5304{
5305 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5306 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5307}
5308
5309
5310/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5311FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5312{
5313 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5314 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5315}
5316
5317
5318/* Opcode 0xf3 0x0f 0x55 - invalid */
5319/* Opcode 0xf2 0x0f 0x55 - invalid */
5320
5321
5322/** Opcode 0x0f 0x56 - orps Vps, Wps */
5323FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5324{
5325 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5326 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_por_u128);
5327}
5328
5329
5330/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5331FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5332{
5333 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5334 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
5335}
5336
5337
5338/* Opcode 0xf3 0x0f 0x56 - invalid */
5339/* Opcode 0xf2 0x0f 0x56 - invalid */
5340
5341
5342/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5343FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5344{
5345 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5346 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pxor_u128);
5347}
5348
5349
5350/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5351FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5352{
5353 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5354 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
5355}
5356
5357
5358/* Opcode 0xf3 0x0f 0x57 - invalid */
5359/* Opcode 0xf2 0x0f 0x57 - invalid */
5360
5361/** Opcode 0x0f 0x58 - addps Vps, Wps */
5362FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5363{
5364 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5365 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5366}
5367
5368
5369/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5370FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5371{
5372 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5373 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5374}
5375
5376
5377/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5378FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5379{
5380 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5381 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5382}
5383
5384
5385/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5386FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5387{
5388 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5389 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5390}
5391
5392
5393/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5394FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5395{
5396 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5397 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5398}
5399
5400
5401/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5402FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5403{
5404 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5405 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5406}
5407
5408
5409/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5410FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5411{
5412 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5413 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5414}
5415
5416
5417/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5418FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5419{
5420 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5421 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5422}
5423
5424
5425/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5426FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5427{
5428 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5429 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5430}
5431
5432
5433/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5434FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5435{
5436 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5437 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5438}
5439
5440
5441/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5442FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5443{
5444 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5445 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5446}
5447
5448
5449/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5450FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5451{
5452 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5453 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5454}
5455
5456
5457/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5458FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5459{
5460 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5461 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5462}
5463
5464
5465/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5466FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5467{
5468 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5469 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5470}
5471
5472
5473/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5474FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5475{
5476 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5477 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5478}
5479
5480
5481/* Opcode 0xf2 0x0f 0x5b - invalid */
5482
5483
5484/** Opcode 0x0f 0x5c - subps Vps, Wps */
5485FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5486{
5487 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5488 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5489}
5490
5491
5492/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5493FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5494{
5495 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5496 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5497}
5498
5499
5500/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5501FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5502{
5503 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5504 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5505}
5506
5507
5508/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5509FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5510{
5511 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5512 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5513}
5514
5515
5516/** Opcode 0x0f 0x5d - minps Vps, Wps */
5517FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5518{
5519 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5520 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5521}
5522
5523
5524/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5525FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5526{
5527 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5528 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5529}
5530
5531
5532/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5533FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5534{
5535 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5536 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5537}
5538
5539
5540/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5541FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5542{
5543 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5544 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5545}
5546
5547
5548/** Opcode 0x0f 0x5e - divps Vps, Wps */
5549FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5550{
5551 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5552 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5553}
5554
5555
5556/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5557FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5558{
5559 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5560 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5561}
5562
5563
5564/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5565FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5566{
5567 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5568 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5569}
5570
5571
5572/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5573FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5574{
5575 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5576 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5577}
5578
5579
5580/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5581FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5582{
5583 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5584 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5585}
5586
5587
5588/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5589FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5590{
5591 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5592 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5593}
5594
5595
5596/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5597FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5598{
5599 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5600 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5601}
5602
5603
5604/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5605FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5606{
5607 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5608 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5609}
5610
5611
5612/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5613FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5614{
5615 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5616 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5617}
5618
5619
5620/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5621FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5622{
5623 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5624 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5625}
5626
5627
5628/* Opcode 0xf3 0x0f 0x60 - invalid */
5629
5630
5631/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5632FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5633{
5634 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
5635 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5636 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5637}
5638
5639
5640/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5641FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5642{
5643 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5644 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5645}
5646
5647
5648/* Opcode 0xf3 0x0f 0x61 - invalid */
5649
5650
5651/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5652FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5653{
5654 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5655 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5656}
5657
5658
5659/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5660FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5661{
5662 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5663 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5664}
5665
5666
5667/* Opcode 0xf3 0x0f 0x62 - invalid */
5668
5669
5670
5671/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5672FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5673{
5674 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5675 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5676}
5677
5678
5679/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5680FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5681{
5682 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5683 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5684}
5685
5686
5687/* Opcode 0xf3 0x0f 0x63 - invalid */
5688
5689
5690/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5691FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5692{
5693 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5694 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5695}
5696
5697
5698/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5699FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5700{
5701 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5702 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5703}
5704
5705
5706/* Opcode 0xf3 0x0f 0x64 - invalid */
5707
5708
5709/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5710FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5711{
5712 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5713 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5714}
5715
5716
5717/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5718FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5719{
5720 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5721 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5722}
5723
5724
5725/* Opcode 0xf3 0x0f 0x65 - invalid */
5726
5727
5728/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5729FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5730{
5731 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5732 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5733}
5734
5735
5736/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5737FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5738{
5739 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5740 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5741}
5742
5743
5744/* Opcode 0xf3 0x0f 0x66 - invalid */
5745
5746
5747/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5748FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5749{
5750 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5751 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5752}
5753
5754
5755/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5756FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5757{
5758 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5760}
5761
5762
5763/* Opcode 0xf3 0x0f 0x67 - invalid */
5764
5765
5766/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5767 * @note Intel and AMD both use Qd for the second parameter, however they
5768 * both list it as an mmX/mem64 operand and Intel describes it as being
5769 * loaded as a qword, so it should be Qq, shouldn't it? */
5770FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5771{
5772 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5773 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5774}
5775
5776
5777/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5778FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5779{
5780 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5781 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5782}
5783
5784
5785/* Opcode 0xf3 0x0f 0x68 - invalid */
5786
5787
5788/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5789 * @note Intel and AMD both use Qd for the second parameter, however they
5790 * both list it as an mmX/mem64 operand and Intel describes it as being
5791 * loaded as a qword, so it should be Qq, shouldn't it? */
5792FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5793{
5794 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5795 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5796}
5797
5798
5799/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5800FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5801{
5802 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5803 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5805}
5806
5807
5808/* Opcode 0xf3 0x0f 0x69 - invalid */
5809
5810
5811/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5812 * @note Intel and AMD both use Qd for the second parameter, however they
5813 * both list it as an mmX/mem64 operand and Intel describes it as being
5814 * loaded as a qword, so it should be Qq, shouldn't it? */
5815FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5816{
5817 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5818 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5819}
5820
5821
5822/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5823FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5824{
5825 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5826 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5827}
5828
5829
5830/* Opcode 0xf3 0x0f 0x6a - invalid */
5831
5832
5833/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5834FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5835{
5836 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5837 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5838}
5839
5840
5841/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5842FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5843{
5844 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5845 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5846}
5847
5848
5849/* Opcode 0xf3 0x0f 0x6b - invalid */
5850
5851
5852/* Opcode 0x0f 0x6c - invalid */
5853
5854
5855/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5856FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5857{
5858 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5859 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5860}
5861
5862
5863/* Opcode 0xf3 0x0f 0x6c - invalid */
5864/* Opcode 0xf2 0x0f 0x6c - invalid */
5865
5866
5867/* Opcode 0x0f 0x6d - invalid */
5868
5869
5870/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5871FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5872{
5873 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5874 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5875}
5876
5877
5878/* Opcode 0xf3 0x0f 0x6d - invalid */
5879
5880
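/** Opcode 0x0f 0x6e - movd_q Pd, Ey */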
5881FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5882{
5883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5885 {
5886 /**
5887 * @opcode 0x6e
5888 * @opcodesub rex.w=1
5889 * @oppfx none
5890 * @opcpuid mmx
5891 * @opgroup og_mmx_datamove
5892 * @opxcpttype 5
5893 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5894 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5895 */
5896 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5897 if (IEM_IS_MODRM_REG_MODE(bRm))
5898 {
5899 /* MMX, greg64 */
5900 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5902 IEM_MC_LOCAL(uint64_t, u64Tmp);
5903
5904 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5905 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5906 IEM_MC_FPU_TO_MMX_MODE();
5907
5908 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5909 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5910
5911 IEM_MC_ADVANCE_RIP_AND_FINISH();
5912 IEM_MC_END();
5913 }
5914 else
5915 {
5916 /* MMX, [mem64] */
5917 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5919 IEM_MC_LOCAL(uint64_t, u64Tmp);
5920
5921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5923 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5924 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5925
5926 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5927 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5928 IEM_MC_FPU_TO_MMX_MODE();
5929
5930 IEM_MC_ADVANCE_RIP_AND_FINISH();
5931 IEM_MC_END();
5932 }
5933 }
5934 else
5935 {
5936 /**
5937 * @opdone
5938 * @opcode 0x6e
5939 * @opcodesub rex.w=0
5940 * @oppfx none
5941 * @opcpuid mmx
5942 * @opgroup og_mmx_datamove
5943 * @opxcpttype 5
5944 * @opfunction iemOp_movd_q_Pd_Ey
5945 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5946 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5947 */
5948 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5949 if (IEM_IS_MODRM_REG_MODE(bRm))
5950 {
5951 /* MMX, greg32 */
5952 IEM_MC_BEGIN(0, 0);
5953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5954 IEM_MC_LOCAL(uint32_t, u32Tmp);
5955
5956 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5957 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5958 IEM_MC_FPU_TO_MMX_MODE();
5959
5960 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5961 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5962
5963 IEM_MC_ADVANCE_RIP_AND_FINISH();
5964 IEM_MC_END();
5965 }
5966 else
5967 {
5968 /* MMX, [mem32] */
5969 IEM_MC_BEGIN(0, 0);
5970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5971 IEM_MC_LOCAL(uint32_t, u32Tmp);
5972
5973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5975 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5976 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5977
5978 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5979 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5980 IEM_MC_FPU_TO_MMX_MODE();
5981
5982 IEM_MC_ADVANCE_RIP_AND_FINISH();
5983 IEM_MC_END();
5984 }
5985 }
5986}
5987
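/** Opcode 0x66 0x0f 0x6e - movd_q Vy, Ey */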
5988FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
5989{
5990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5991 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5992 {
5993 /**
5994 * @opcode 0x6e
5995 * @opcodesub rex.w=1
5996 * @oppfx 0x66
5997 * @opcpuid sse2
5998 * @opgroup og_sse2_simdint_datamove
5999 * @opxcpttype 5
6000 * @optest 64-bit / op1=1 op2=2 -> op1=2
6001 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6002 */
6003 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6004 if (IEM_IS_MODRM_REG_MODE(bRm))
6005 {
6006 /* XMM, greg64 */
6007 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6009 IEM_MC_LOCAL(uint64_t, u64Tmp);
6010
6011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6013
6014 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6015 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6016
6017 IEM_MC_ADVANCE_RIP_AND_FINISH();
6018 IEM_MC_END();
6019 }
6020 else
6021 {
6022 /* XMM, [mem64] */
6023 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6025 IEM_MC_LOCAL(uint64_t, u64Tmp);
6026
6027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6031
6032 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6033 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6034
6035 IEM_MC_ADVANCE_RIP_AND_FINISH();
6036 IEM_MC_END();
6037 }
6038 }
6039 else
6040 {
6041 /**
6042 * @opdone
6043 * @opcode 0x6e
6044 * @opcodesub rex.w=0
6045 * @oppfx 0x66
6046 * @opcpuid sse2
6047 * @opgroup og_sse2_simdint_datamove
6048 * @opxcpttype 5
6049 * @opfunction iemOp_movd_q_Vy_Ey
6050 * @optest op1=1 op2=2 -> op1=2
6051 * @optest op1=0 op2=-42 -> op1=-42
6052 */
6053 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6054 if (IEM_IS_MODRM_REG_MODE(bRm))
6055 {
6056 /* XMM, greg32 */
6057 IEM_MC_BEGIN(0, 0);
6058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6059 IEM_MC_LOCAL(uint32_t, u32Tmp);
6060
6061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6062 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6063
6064 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6065 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6066
6067 IEM_MC_ADVANCE_RIP_AND_FINISH();
6068 IEM_MC_END();
6069 }
6070 else
6071 {
6072 /* XMM, [mem32] */
6073 IEM_MC_BEGIN(0, 0);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6075 IEM_MC_LOCAL(uint32_t, u32Tmp);
6076
6077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6080 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6081
6082 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6083 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6084
6085 IEM_MC_ADVANCE_RIP_AND_FINISH();
6086 IEM_MC_END();
6087 }
6088 }
6089}
6090
6091/* Opcode 0xf3 0x0f 0x6e - invalid */
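/* Opcode 0xf2 0x0f 0x6e - invalid */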
6092
6093
6094/**
6095 * @opcode 0x6f
6096 * @oppfx none
6097 * @opcpuid mmx
6098 * @opgroup og_mmx_datamove
6099 * @opxcpttype 5
6100 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6101 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6102 */
6103FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6104{
6105 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6107 if (IEM_IS_MODRM_REG_MODE(bRm))
6108 {
6109 /*
6110 * Register, register.
6111 */
6112 IEM_MC_BEGIN(0, 0);
6113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6114 IEM_MC_LOCAL(uint64_t, u64Tmp);
6115
6116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6117 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6118 IEM_MC_FPU_TO_MMX_MODE();
6119
6120 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6121 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6122
6123 IEM_MC_ADVANCE_RIP_AND_FINISH();
6124 IEM_MC_END();
6125 }
6126 else
6127 {
6128 /*
6129 * Register, memory.
6130 */
6131 IEM_MC_BEGIN(0, 0);
6132 IEM_MC_LOCAL(uint64_t, u64Tmp);
6133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6134
6135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6137 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6138 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6139
6140 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6141 IEM_MC_FPU_TO_MMX_MODE();
6142
6143 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6144
6145 IEM_MC_ADVANCE_RIP_AND_FINISH();
6146 IEM_MC_END();
6147 }
6148}
6149
6150/**
6151 * @opcode 0x6f
6152 * @oppfx 0x66
6153 * @opcpuid sse2
6154 * @opgroup og_sse2_simdint_datamove
6155 * @opxcpttype 1
6156 * @optest op1=1 op2=2 -> op1=2
6157 * @optest op1=0 op2=-42 -> op1=-42
6158 */
6159FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6160{
6161 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6163 if (IEM_IS_MODRM_REG_MODE(bRm))
6164 {
6165 /*
6166 * Register, register.
6167 */
6168 IEM_MC_BEGIN(0, 0);
6169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6170
6171 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6173
6174 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6175 IEM_GET_MODRM_RM(pVCpu, bRm));
6176 IEM_MC_ADVANCE_RIP_AND_FINISH();
6177 IEM_MC_END();
6178 }
6179 else
6180 {
6181 /*
6182 * Register, memory.
6183 */
6184 IEM_MC_BEGIN(0, 0);
6185 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6187
6188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6190 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6192
6193 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6194 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6195
6196 IEM_MC_ADVANCE_RIP_AND_FINISH();
6197 IEM_MC_END();
6198 }
6199}
6200
6201/**
6202 * @opcode 0x6f
6203 * @oppfx 0xf3
6204 * @opcpuid sse2
6205 * @opgroup og_sse2_simdint_datamove
6206 * @opxcpttype 4UA
6207 * @optest op1=1 op2=2 -> op1=2
6208 * @optest op1=0 op2=-42 -> op1=-42
6209 */
6210FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6211{
6212 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6214 if (IEM_IS_MODRM_REG_MODE(bRm))
6215 {
6216 /*
6217 * Register, register.
6218 */
6219 IEM_MC_BEGIN(0, 0);
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6221 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6222 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6223 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6224 IEM_GET_MODRM_RM(pVCpu, bRm));
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 }
6228 else
6229 {
6230 /*
6231 * Register, memory.
6232 */
6233 IEM_MC_BEGIN(0, 0);
6234 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6236
6237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6239 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6240 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
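/* Unlike movdqa, movdqu performs no alignment check on its memory operand, hence the NO_AC fetch. */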
6241 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6242 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6243
6244 IEM_MC_ADVANCE_RIP_AND_FINISH();
6245 IEM_MC_END();
6246 }
6247}
6248
6249
6250/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6251FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6252{
6253 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6255 if (IEM_IS_MODRM_REG_MODE(bRm))
6256 {
6257 /*
6258 * Register, register.
6259 */
6260 IEM_MC_BEGIN(0, 0);
6261 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6263 IEM_MC_ARG(uint64_t *, pDst, 0);
6264 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6265 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6267 IEM_MC_PREPARE_FPU_USAGE();
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6271 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6272 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6273 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6274
6275 IEM_MC_ADVANCE_RIP_AND_FINISH();
6276 IEM_MC_END();
6277 }
6278 else
6279 {
6280 /*
6281 * Register, memory.
6282 */
6283 IEM_MC_BEGIN(0, 0);
6284 IEM_MC_ARG(uint64_t *, pDst, 0);
6285 IEM_MC_LOCAL(uint64_t, uSrc);
6286 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6288
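/* One imm8 byte follows ModRM, hence the non-zero immediate-size argument to the effective address calculation. */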
6289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6290 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6291 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6293 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6294 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6295
6296 IEM_MC_PREPARE_FPU_USAGE();
6297 IEM_MC_FPU_TO_MMX_MODE();
6298
6299 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6300 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6301 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6302
6303 IEM_MC_ADVANCE_RIP_AND_FINISH();
6304 IEM_MC_END();
6305 }
6306}
6307
6308
6309/**
6310 * Common worker for SSE2 instructions on the forms:
6311 * pshufd xmm1, xmm2/mem128, imm8
6312 * pshufhw xmm1, xmm2/mem128, imm8
6313 * pshuflw xmm1, xmm2/mem128, imm8
6314 *
6315 * Proper alignment of the 128-bit operand is enforced.
6316 * Exceptions type 4. SSE2 cpuid checks.
6317 */
6318FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6319{
6320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6321 if (IEM_IS_MODRM_REG_MODE(bRm))
6322 {
6323 /*
6324 * Register, register.
6325 */
6326 IEM_MC_BEGIN(0, 0);
6327 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6329 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6330 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6331 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6333 IEM_MC_PREPARE_SSE_USAGE();
6334 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6335 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6336 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6337 IEM_MC_ADVANCE_RIP_AND_FINISH();
6338 IEM_MC_END();
6339 }
6340 else
6341 {
6342 /*
6343 * Register, memory.
6344 */
6345 IEM_MC_BEGIN(0, 0);
6346 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6347 IEM_MC_LOCAL(RTUINT128U, uSrc);
6348 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6350
6351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6353 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6356
6357 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6358 IEM_MC_PREPARE_SSE_USAGE();
6359 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6360 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6361
6362 IEM_MC_ADVANCE_RIP_AND_FINISH();
6363 IEM_MC_END();
6364 }
6365}
6366
6367
6368/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6369FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6370{
6371 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6372 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6373}
6374
6375
6376/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6377FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6378{
6379 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6380 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6381}
6382
6383
6384/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6385FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6386{
6387 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6388 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6389}
6390
6391
6392/**
6393 * Common worker for MMX instructions of the form:
6394 * psrlw mm, imm8
6395 * psraw mm, imm8
6396 * psllw mm, imm8
6397 * psrld mm, imm8
6398 * psrad mm, imm8
6399 * pslld mm, imm8
6400 * psrlq mm, imm8
6401 * psllq mm, imm8
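 * The destination register is encoded in ModRM.rm; the shift count is the trailing imm8.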
6402 *
6403 */
6404FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6405{
6406 if (IEM_IS_MODRM_REG_MODE(bRm))
6407 {
6408 /*
6409 * Register, immediate.
6410 */
6411 IEM_MC_BEGIN(0, 0);
6412 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6414 IEM_MC_ARG(uint64_t *, pDst, 0);
6415 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6416 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6417 IEM_MC_PREPARE_FPU_USAGE();
6418 IEM_MC_FPU_TO_MMX_MODE();
6419
6420 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6421 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6422 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6423
6424 IEM_MC_ADVANCE_RIP_AND_FINISH();
6425 IEM_MC_END();
6426 }
6427 else
6428 {
6429 /*
6430 * Register, memory not supported.
6431 */
6432 /// @todo Caller already enforced register mode?!
6433 AssertFailedReturn(VINF_SUCCESS);
6434 }
6435}
6436
6437
6438/**
6439 * Common worker for SSE2 instructions of the form:
6440 * psrlw xmm, imm8
6441 * psraw xmm, imm8
6442 * psllw xmm, imm8
6443 * psrld xmm, imm8
6444 * psrad xmm, imm8
6445 * pslld xmm, imm8
6446 * psrlq xmm, imm8
6447 * psllq xmm, imm8
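 * As for the MMX variant, the destination register is encoded in ModRM.rm and the shift count in the trailing imm8.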
6448 *
6449 */
6450FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6451{
6452 if (IEM_IS_MODRM_REG_MODE(bRm))
6453 {
6454 /*
6455 * Register, immediate.
6456 */
6457 IEM_MC_BEGIN(0, 0);
6458 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6460 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6461 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6462 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6463 IEM_MC_PREPARE_SSE_USAGE();
6464 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6465 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6466 IEM_MC_ADVANCE_RIP_AND_FINISH();
6467 IEM_MC_END();
6468 }
6469 else
6470 {
6471 /*
6472 * Register, memory.
6473 */
6474 /// @todo Caller already enforced register mode?!
6475 AssertFailedReturn(VINF_SUCCESS);
6476 }
6477}
6478
6479
6480/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6481FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6482{
6483// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6484 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6485}
6486
6487
6488/** Opcode 0x66 0x0f 0x71 11/2. */
6489FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6490{
6491// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6492 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6493}
6494
6495
6496/** Opcode 0x0f 0x71 11/4. */
6497FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6498{
6499// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6500 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6501}
6502
6503
6504/** Opcode 0x66 0x0f 0x71 11/4. */
6505FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6506{
6507// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6508 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6509}
6510
6511
6512/** Opcode 0x0f 0x71 11/6. */
6513FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6514{
6515// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6516 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6517}
6518
6519
6520/** Opcode 0x66 0x0f 0x71 11/6. */
6521FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6522{
6523// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6524 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6525}
6526
6527
6528/**
6529 * Group 12 jump table for register variant.
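 * Indexed by [ModRM.reg * 4 + idxPrefix], with idxPrefix being 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2.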
6530 */
6531IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6532{
6533 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6534 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6535 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6536 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6537 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6538 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6539 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6540 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6541};
6542AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6543
6544
6545/** Opcode 0x0f 0x71. */
6546FNIEMOP_DEF(iemOp_Grp12)
6547{
6548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6549 if (IEM_IS_MODRM_REG_MODE(bRm))
6550 /* register, register */
6551 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6552 + pVCpu->iem.s.idxPrefix], bRm);
6553 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6554}
6555
6556
6557/** Opcode 0x0f 0x72 11/2. */
6558FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6559{
6560// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6561 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6562}
6563
6564
6565/** Opcode 0x66 0x0f 0x72 11/2. */
6566FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6567{
6568// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6569 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6570}
6571
6572
6573/** Opcode 0x0f 0x72 11/4. */
6574FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6575{
6576// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6577 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6578}
6579
6580
6581/** Opcode 0x66 0x0f 0x72 11/4. */
6582FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6583{
6584// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6585 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6586}
6587
6588
6589/** Opcode 0x0f 0x72 11/6. */
6590FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6591{
6592// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6593 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6594}
6595
6596/** Opcode 0x66 0x0f 0x72 11/6. */
6597FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6598{
6599// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6600 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6601}
6602
6603
6604/**
6605 * Group 13 jump table for register variant.
6606 */
6607IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6608{
6609 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6610 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6611 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6612 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6613 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6614 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6615 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6616 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6617};
6618AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6619
6620/** Opcode 0x0f 0x72. */
6621FNIEMOP_DEF(iemOp_Grp13)
6622{
6623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6624 if (IEM_IS_MODRM_REG_MODE(bRm))
6625 /* register, register */
6626 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6627 + pVCpu->iem.s.idxPrefix], bRm);
6628 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6629}
6630
6631
6632/** Opcode 0x0f 0x73 11/2. */
6633FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6634{
6635// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6636 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6637}
6638
6639
6640/** Opcode 0x66 0x0f 0x73 11/2. */
6641FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6642{
6643// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6644 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6645}
6646
6647
6648/** Opcode 0x66 0x0f 0x73 11/3. */
6649FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6650{
6651// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6652 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6653}
6654
6655
6656/** Opcode 0x0f 0x73 11/6. */
6657FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6658{
6659// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6660 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6661}
6662
6663
6664/** Opcode 0x66 0x0f 0x73 11/6. */
6665FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6666{
6667// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6668 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6669}
6670
6671
6672/** Opcode 0x66 0x0f 0x73 11/7. */
6673FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6674{
6675// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6676 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6677}
6678
6679/**
6680 * Group 14 jump table for register variant.
6681 */
6682IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6683{
6684 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6685 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6686 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6687 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6688 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6689 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6691 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6692};
6693AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6694
6695
6696/** Opcode 0x0f 0x73. */
6697FNIEMOP_DEF(iemOp_Grp14)
6698{
6699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6700 if (IEM_IS_MODRM_REG_MODE(bRm))
6701 /* register, register */
6702 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6703 + pVCpu->iem.s.idxPrefix], bRm);
6704 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6705}
6706
6707
6708/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6709FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6710{
6711 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6712 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6713}
6714
6715
6716/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6717FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6718{
6719 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6720 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6721}
6722
6723
6724/* Opcode 0xf3 0x0f 0x74 - invalid */
6725/* Opcode 0xf2 0x0f 0x74 - invalid */
6726
6727
6728/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6729FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6730{
6731 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6732 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6733}
6734
6735
6736/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6737FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6738{
6739 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6740 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6741}
6742
6743
6744/* Opcode 0xf3 0x0f 0x75 - invalid */
6745/* Opcode 0xf2 0x0f 0x75 - invalid */
6746
6747
6748/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6749FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6750{
6751 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6752 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6753}
6754
6755
6756/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6757FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6758{
6759 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6760 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6761}
6762
6763
6764/* Opcode 0xf3 0x0f 0x76 - invalid */
6765/* Opcode 0xf2 0x0f 0x76 - invalid */
6766
6767
6768/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6769FNIEMOP_DEF(iemOp_emms)
6770{
6771 IEMOP_MNEMONIC(emms, "emms");
6772 IEM_MC_BEGIN(0, 0);
6773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6775 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
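/* emms marks the whole x87 register stack as empty (FTW=0) and leaves MMX mode. */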
6777 IEM_MC_FPU_FROM_MMX_MODE();
6778 IEM_MC_ADVANCE_RIP_AND_FINISH();
6779 IEM_MC_END();
6780}
6781
6782/* Opcode 0x66 0x0f 0x77 - invalid */
6783/* Opcode 0xf3 0x0f 0x77 - invalid */
6784/* Opcode 0xf2 0x0f 0x77 - invalid */
6785
6786/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6787#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6788FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6789{
6790 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6791 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6792 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6793 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6794
6795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6796 if (IEM_IS_MODRM_REG_MODE(bRm))
6797 {
6798 /*
6799 * Register, register.
6800 */
6801 if (enmEffOpSize == IEMMODE_64BIT)
6802 {
6803 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6804 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6806 IEM_MC_ARG(uint64_t, u64Enc, 1);
6807 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6808 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6809 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6810 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6811 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6812 IEM_MC_END();
6813 }
6814 else
6815 {
6816 IEM_MC_BEGIN(0, 0);
6817 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6819 IEM_MC_ARG(uint32_t, u32Enc, 1);
6820 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6821 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6822 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6823 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6824 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6825 IEM_MC_END();
6826 }
6827 }
6828 else
6829 {
6830 /*
6831 * Memory, register.
6832 */
6833 if (enmEffOpSize == IEMMODE_64BIT)
6834 {
6835 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6836 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6838 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6839 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6840 IEM_MC_ARG(uint64_t, u64Enc, 2);
6841 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6842 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6843 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6844 IEM_MC_END();
6845 }
6846 else
6847 {
6848 IEM_MC_BEGIN(0, 0);
6849 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6851 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6852 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6853 IEM_MC_ARG(uint32_t, u32Enc, 2);
6854 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6855 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6856 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6857 IEM_MC_END();
6858 }
6859 }
6860}
6861#else
6862FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
6863#endif
6864
6865/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6866FNIEMOP_STUB(iemOp_AmdGrp17);
6867/* Opcode 0xf3 0x0f 0x78 - invalid */
6868/* Opcode 0xf2 0x0f 0x78 - invalid */
6869
6870/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6871#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6872FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6873{
6874 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6875 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6876 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6877 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6878
6879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6880 if (IEM_IS_MODRM_REG_MODE(bRm))
6881 {
6882 /*
6883 * Register, register.
6884 */
6885 if (enmEffOpSize == IEMMODE_64BIT)
6886 {
6887 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6888 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6889 IEM_MC_ARG(uint64_t, u64Val, 0);
6890 IEM_MC_ARG(uint64_t, u64Enc, 1);
6891 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6892 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6893 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
6894 IEM_MC_END();
6895 }
6896 else
6897 {
6898 IEM_MC_BEGIN(0, 0);
6899 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6900 IEM_MC_ARG(uint32_t, u32Val, 0);
6901 IEM_MC_ARG(uint32_t, u32Enc, 1);
6902 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6903 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6904 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
6905 IEM_MC_END();
6906 }
6907 }
6908 else
6909 {
6910 /*
6911 * Register, memory.
6912 */
6913 if (enmEffOpSize == IEMMODE_64BIT)
6914 {
6915 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6916 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6918 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6919 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6920 IEM_MC_ARG(uint64_t, u64Enc, 2);
6921 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6922 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6923 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
6924 IEM_MC_END();
6925 }
6926 else
6927 {
6928 IEM_MC_BEGIN(0, 0);
6929 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6931 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6932 IEM_MC_ARG(uint32_t, u32Enc, 2);
6933 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6934 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6935 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6936 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
6937 IEM_MC_END();
6938 }
6939 }
6940}
6941#else
6942FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
6943#endif
6944/* Opcode 0x66 0x0f 0x79 - invalid */
6945/* Opcode 0xf3 0x0f 0x79 - invalid */
6946/* Opcode 0xf2 0x0f 0x79 - invalid */
6947
6948/* Opcode 0x0f 0x7a - invalid */
6949/* Opcode 0x66 0x0f 0x7a - invalid */
6950/* Opcode 0xf3 0x0f 0x7a - invalid */
6951/* Opcode 0xf2 0x0f 0x7a - invalid */
6952
6953/* Opcode 0x0f 0x7b - invalid */
6954/* Opcode 0x66 0x0f 0x7b - invalid */
6955/* Opcode 0xf3 0x0f 0x7b - invalid */
6956/* Opcode 0xf2 0x0f 0x7b - invalid */
6957
6958/* Opcode 0x0f 0x7c - invalid */
6959
6960
6961/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
6962FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
6963{
6964 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6965 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
6966}
6967
6968
6969/* Opcode 0xf3 0x0f 0x7c - invalid */
6970
6971
6972/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
6973FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
6974{
6975 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6976 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
6977}
6978
6979
6980/* Opcode 0x0f 0x7d - invalid */
6981
6982
6983/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
6984FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
6985{
6986 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6987 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
6988}
6989
6990
6991/* Opcode 0xf3 0x0f 0x7d - invalid */
6992
6993
6994/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
6995FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
6996{
6997 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6998 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
6999}
7000
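#if 0 /* Illustrative sketch, not compiled: the SSE3 horizontal semantics assumed
       * for the helpers referenced above (haddpd shown; hsubpd subtracts instead,
       * and the ps forms do the same pairwise on four singles). */
static void haddpdSketch(double pafDst[2], const double pafSrc[2])
{
    double const rLo = pafDst[0] + pafDst[1];   /* low result: horizontal add of dst */
    double const rHi = pafSrc[0] + pafSrc[1];   /* high result: horizontal add of src */
    pafDst[0] = rLo;
    pafDst[1] = rHi;
}
#endif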
7001
7002/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7003FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7004{
7005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7006 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7007 {
7008 /**
7009 * @opcode 0x7e
7010 * @opcodesub rex.w=1
7011 * @oppfx none
7012 * @opcpuid mmx
7013 * @opgroup og_mmx_datamove
7014 * @opxcpttype 5
7015 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7016 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7017 */
7018 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7019 if (IEM_IS_MODRM_REG_MODE(bRm))
7020 {
7021 /* greg64, MMX */
7022 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7024 IEM_MC_LOCAL(uint64_t, u64Tmp);
7025
7026 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7027 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7028 IEM_MC_FPU_TO_MMX_MODE();
7029
7030 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7031 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7032
7033 IEM_MC_ADVANCE_RIP_AND_FINISH();
7034 IEM_MC_END();
7035 }
7036 else
7037 {
7038 /* [mem64], MMX */
7039 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7041 IEM_MC_LOCAL(uint64_t, u64Tmp);
7042
7043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7045 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7046 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7047
7048 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7049 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7050 IEM_MC_FPU_TO_MMX_MODE();
7051
7052 IEM_MC_ADVANCE_RIP_AND_FINISH();
7053 IEM_MC_END();
7054 }
7055 }
7056 else
7057 {
7058 /**
7059 * @opdone
7060 * @opcode 0x7e
7061 * @opcodesub rex.w=0
7062 * @oppfx none
7063 * @opcpuid mmx
7064 * @opgroup og_mmx_datamove
7065 * @opxcpttype 5
7066 * @opfunction iemOp_movd_q_Ey_Pd
7067 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7068 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7069 */
7070 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7071 if (IEM_IS_MODRM_REG_MODE(bRm))
7072 {
7073 /* greg32, MMX */
7074 IEM_MC_BEGIN(0, 0);
7075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7076 IEM_MC_LOCAL(uint32_t, u32Tmp);
7077
7078 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7079 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7080 IEM_MC_FPU_TO_MMX_MODE();
7081
7082 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7083 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7084
7085 IEM_MC_ADVANCE_RIP_AND_FINISH();
7086 IEM_MC_END();
7087 }
7088 else
7089 {
7090 /* [mem32], MMX */
7091 IEM_MC_BEGIN(0, 0);
7092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7093 IEM_MC_LOCAL(uint32_t, u32Tmp);
7094
7095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7097 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7098 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7099
7100 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7101 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7102 IEM_MC_FPU_TO_MMX_MODE();
7103
7104 IEM_MC_ADVANCE_RIP_AND_FINISH();
7105 IEM_MC_END();
7106 }
7107 }
7108}
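
#if 0 /* Illustrative sketch, not compiled: roughly what IEM_MC_FPU_TO_MMX_MODE does,
       * inferred from the ftw=0xff expectations in the @optest lines above (an
       * assumption, not the authoritative implementation). Note that in the memory
       * paths above the mode switch is sequenced after the store, so a faulting
       * store leaves the x87/MMX state untouched. */
static void fpuToMmxModeSketch(PX86FXSTATE pFpuCtx)
{
    pFpuCtx->FTW  = 0xff;                /* abridged tag word: all eight registers valid */
    pFpuCtx->FSW &= ~X86_FSW_TOP_MASK;   /* top-of-stack pointer cleared to 0 */
}
#endif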
7109
7110
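/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */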
7111FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7112{
7113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7114 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7115 {
7116 /**
7117 * @opcode 0x7e
7118 * @opcodesub rex.w=1
7119 * @oppfx 0x66
7120 * @opcpuid sse2
7121 * @opgroup og_sse2_simdint_datamove
7122 * @opxcpttype 5
7123 * @optest 64-bit / op1=1 op2=2 -> op1=2
7124 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7125 */
7126 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7127 if (IEM_IS_MODRM_REG_MODE(bRm))
7128 {
7129 /* greg64, XMM */
7130 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7132 IEM_MC_LOCAL(uint64_t, u64Tmp);
7133
7134 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7136
7137 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7138 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7139
7140 IEM_MC_ADVANCE_RIP_AND_FINISH();
7141 IEM_MC_END();
7142 }
7143 else
7144 {
7145 /* [mem64], XMM */
7146 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7148 IEM_MC_LOCAL(uint64_t, u64Tmp);
7149
7150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7152 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7153 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7154
7155 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7156 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7157
7158 IEM_MC_ADVANCE_RIP_AND_FINISH();
7159 IEM_MC_END();
7160 }
7161 }
7162 else
7163 {
7164 /**
7165 * @opdone
7166 * @opcode 0x7e
7167 * @opcodesub rex.w=0
7168 * @oppfx 0x66
7169 * @opcpuid sse2
7170 * @opgroup og_sse2_simdint_datamove
7171 * @opxcpttype 5
7172 * @opfunction iemOp_movd_q_Ey_Vy
7173 * @optest op1=1 op2=2 -> op1=2
7174 * @optest op1=0 op2=-42 -> op1=-42
7175 */
7176 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7177 if (IEM_IS_MODRM_REG_MODE(bRm))
7178 {
7179 /* greg32, XMM */
7180 IEM_MC_BEGIN(0, 0);
7181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7182 IEM_MC_LOCAL(uint32_t, u32Tmp);
7183
7184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7186
7187 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7188 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7189
7190 IEM_MC_ADVANCE_RIP_AND_FINISH();
7191 IEM_MC_END();
7192 }
7193 else
7194 {
7195 /* [mem32], XMM */
7196 IEM_MC_BEGIN(0, 0);
7197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7198 IEM_MC_LOCAL(uint32_t, u32Tmp);
7199
7200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7204
7205 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7206 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7207
7208 IEM_MC_ADVANCE_RIP_AND_FINISH();
7209 IEM_MC_END();
7210 }
7211 }
7212}
7213
7214/**
7215 * @opcode 0x7e
7216 * @oppfx 0xf3
7217 * @opcpuid sse2
7218 * @opgroup og_sse2_pcksclr_datamove
7219 * @opxcpttype none
7220 * @optest op1=1 op2=2 -> op1=2
7221 * @optest op1=0 op2=-42 -> op1=-42
7222 */
7223FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7224{
7225 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7227 if (IEM_IS_MODRM_REG_MODE(bRm))
7228 {
7229 /*
7230 * XMM128, XMM64.
7231 */
7232 IEM_MC_BEGIN(0, 0);
7233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7234 IEM_MC_LOCAL(uint64_t, uSrc);
7235
7236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7238
7239 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7240 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7241
7242 IEM_MC_ADVANCE_RIP_AND_FINISH();
7243 IEM_MC_END();
7244 }
7245 else
7246 {
7247 /*
7248 * XMM128, [mem64].
7249 */
7250 IEM_MC_BEGIN(0, 0);
7251 IEM_MC_LOCAL(uint64_t, uSrc);
7252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7253
7254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7258
7259 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7260 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7261
7262 IEM_MC_ADVANCE_RIP_AND_FINISH();
7263 IEM_MC_END();
7264 }
7265}
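
#if 0 /* Illustrative sketch, not compiled: the zero-extending store performed by
       * IEM_MC_STORE_XREG_U64_ZX_U128 above (assumption: standard movq xmm,xmm/m64
       * semantics - low qword copied, high qword cleared). */
static void movqVqWqSketch(RTUINT128U *puDst, uint64_t uSrc)
{
    puDst->s.Lo = uSrc;  /* low qword gets the source */
    puDst->s.Hi = 0;     /* high qword is zeroed */
}
#endif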
7266
7267/* Opcode 0xf2 0x0f 0x7e - invalid */
7268
7269
7270/** Opcode 0x0f 0x7f - movq Qq, Pq */
7271FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7272{
7273 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7275 if (IEM_IS_MODRM_REG_MODE(bRm))
7276 {
7277 /*
7278 * MMX, MMX.
7279 */
7280 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7281 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7282 IEM_MC_BEGIN(0, 0);
7283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7284 IEM_MC_LOCAL(uint64_t, u64Tmp);
7285 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7286 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7287 IEM_MC_FPU_TO_MMX_MODE();
7288
7289 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7290 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7291
7292 IEM_MC_ADVANCE_RIP_AND_FINISH();
7293 IEM_MC_END();
7294 }
7295 else
7296 {
7297 /*
7298 * [mem64], MMX.
7299 */
7300 IEM_MC_BEGIN(0, 0);
7301 IEM_MC_LOCAL(uint64_t, u64Tmp);
7302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7303
7304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7306 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7307 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7308
7309 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7310 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7311 IEM_MC_FPU_TO_MMX_MODE();
7312
7313 IEM_MC_ADVANCE_RIP_AND_FINISH();
7314 IEM_MC_END();
7315 }
7316}
7317
7318/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7319FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7320{
7321 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7323 if (IEM_IS_MODRM_REG_MODE(bRm))
7324 {
7325 /*
7326 * XMM, XMM.
7327 */
7328 IEM_MC_BEGIN(0, 0);
7329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7333 IEM_GET_MODRM_REG(pVCpu, bRm));
7334 IEM_MC_ADVANCE_RIP_AND_FINISH();
7335 IEM_MC_END();
7336 }
7337 else
7338 {
7339 /*
7340 * [mem128], XMM.
7341 */
7342 IEM_MC_BEGIN(0, 0);
7343 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7345
7346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7350
7351 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7352 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7353
7354 IEM_MC_ADVANCE_RIP_AND_FINISH();
7355 IEM_MC_END();
7356 }
7357}
7358
7359/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7360FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7361{
7362 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7364 if (IEM_IS_MODRM_REG_MODE(bRm))
7365 {
7366 /*
7367 * XMM, XMM.
7368 */
7369 IEM_MC_BEGIN(0, 0);
7370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7371 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7372 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7373 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7374 IEM_GET_MODRM_REG(pVCpu, bRm));
7375 IEM_MC_ADVANCE_RIP_AND_FINISH();
7376 IEM_MC_END();
7377 }
7378 else
7379 {
7380 /*
7381 * [mem128], XMM.
7382 */
7383 IEM_MC_BEGIN(0, 0);
7384 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7386
7387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7390 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7391
7392 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7393 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7394
7395 IEM_MC_ADVANCE_RIP_AND_FINISH();
7396 IEM_MC_END();
7397 }
7398}
7399
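#if 0 /* Illustrative sketch, not compiled: the difference between the two 128-bit
       * stores above (assumption: standard SSE2 rules). IEM_MC_STORE_MEM_U128_ALIGN_SSE
       * enforces 16-byte alignment (#GP(0) otherwise) for movdqa, while the _NO_AC
       * variant used by movdqu accepts any alignment. */
static bool movdqaIsAlignmentOkSketch(RTGCPTR GCPtrEff)
{
    return !(GCPtrEff & 15); /* movdqa requires a 16-byte aligned address */
}
#endif
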
7400/* Opcode 0xf2 0x0f 0x7f - invalid */
7401
7402
7403/**
7404 * @opcode 0x80
7405 * @opfltest of
7406 */
7407FNIEMOP_DEF(iemOp_jo_Jv)
7408{
7409 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7410 IEMOP_HLP_MIN_386();
7411 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7412 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7413 {
7414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7415 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7417 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7418 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7419 } IEM_MC_ELSE() {
7420 IEM_MC_ADVANCE_RIP_AND_FINISH();
7421 } IEM_MC_ENDIF();
7422 IEM_MC_END();
7423 }
7424 else
7425 {
7426 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7427 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7430 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7431 } IEM_MC_ELSE() {
7432 IEM_MC_ADVANCE_RIP_AND_FINISH();
7433 } IEM_MC_ENDIF();
7434 IEM_MC_END();
7435 }
7436}
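
#if 0 /* Illustrative sketch, not compiled: the target calculation behind
       * IEM_MC_REL_JMP_S16/S32_AND_FINISH (a simplification; the real microcode
       * also performs code-segment limit and canonical checks). The handlers for
       * 0x0f 0x80..0x8f below all follow this pattern and differ only in the
       * EFLAGS condition tested. */
static uint64_t jccTargetSketch(uint64_t uRipNext, int32_t i32Disp, IEMMODE enmEffOpSize)
{
    uint64_t uNewRip = uRipNext + (int64_t)i32Disp; /* displacement is relative to the next instruction */
    if (enmEffOpSize == IEMMODE_16BIT)
        uNewRip &= UINT16_MAX;                      /* 16-bit operand size truncates to IP */
    return uNewRip;
}
#endif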
7437
7438
7439/**
7440 * @opcode 0x81
7441 * @opfltest of
7442 */
7443FNIEMOP_DEF(iemOp_jno_Jv)
7444{
7445 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7446 IEMOP_HLP_MIN_386();
7447 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7448 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7449 {
7450 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7451 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7453 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7454 IEM_MC_ADVANCE_RIP_AND_FINISH();
7455 } IEM_MC_ELSE() {
7456 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7457 } IEM_MC_ENDIF();
7458 IEM_MC_END();
7459 }
7460 else
7461 {
7462 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7463 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7466 IEM_MC_ADVANCE_RIP_AND_FINISH();
7467 } IEM_MC_ELSE() {
7468 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7469 } IEM_MC_ENDIF();
7470 IEM_MC_END();
7471 }
7472}
7473
7474
7475/**
7476 * @opcode 0x82
7477 * @opfltest cf
7478 */
7479FNIEMOP_DEF(iemOp_jc_Jv)
7480{
7481 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7482 IEMOP_HLP_MIN_386();
7483 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7484 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7485 {
7486 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7487 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7489 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7490 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7491 } IEM_MC_ELSE() {
7492 IEM_MC_ADVANCE_RIP_AND_FINISH();
7493 } IEM_MC_ENDIF();
7494 IEM_MC_END();
7495 }
7496 else
7497 {
7498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7499 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7502 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7503 } IEM_MC_ELSE() {
7504 IEM_MC_ADVANCE_RIP_AND_FINISH();
7505 } IEM_MC_ENDIF();
7506 IEM_MC_END();
7507 }
7508}
7509
7510
7511/**
7512 * @opcode 0x83
7513 * @opfltest cf
7514 */
7515FNIEMOP_DEF(iemOp_jnc_Jv)
7516{
7517 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7518 IEMOP_HLP_MIN_386();
7519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7521 {
7522 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7523 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7526 IEM_MC_ADVANCE_RIP_AND_FINISH();
7527 } IEM_MC_ELSE() {
7528 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7529 } IEM_MC_ENDIF();
7530 IEM_MC_END();
7531 }
7532 else
7533 {
7534 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7538 IEM_MC_ADVANCE_RIP_AND_FINISH();
7539 } IEM_MC_ELSE() {
7540 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7541 } IEM_MC_ENDIF();
7542 IEM_MC_END();
7543 }
7544}
7545
7546
7547/**
7548 * @opcode 0x84
7549 * @opfltest zf
7550 */
7551FNIEMOP_DEF(iemOp_je_Jv)
7552{
7553 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7554 IEMOP_HLP_MIN_386();
7555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7557 {
7558 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7559 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7562 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7563 } IEM_MC_ELSE() {
7564 IEM_MC_ADVANCE_RIP_AND_FINISH();
7565 } IEM_MC_ENDIF();
7566 IEM_MC_END();
7567 }
7568 else
7569 {
7570 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7574 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7575 } IEM_MC_ELSE() {
7576 IEM_MC_ADVANCE_RIP_AND_FINISH();
7577 } IEM_MC_ENDIF();
7578 IEM_MC_END();
7579 }
7580}
7581
7582
7583/**
7584 * @opcode 0x85
7585 * @opfltest zf
7586 */
7587FNIEMOP_DEF(iemOp_jne_Jv)
7588{
7589 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7590 IEMOP_HLP_MIN_386();
7591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7593 {
7594 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7595 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7598 IEM_MC_ADVANCE_RIP_AND_FINISH();
7599 } IEM_MC_ELSE() {
7600 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7601 } IEM_MC_ENDIF();
7602 IEM_MC_END();
7603 }
7604 else
7605 {
7606 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7610 IEM_MC_ADVANCE_RIP_AND_FINISH();
7611 } IEM_MC_ELSE() {
7612 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7613 } IEM_MC_ENDIF();
7614 IEM_MC_END();
7615 }
7616}
7617
7618
7619/**
7620 * @opcode 0x86
7621 * @opfltest cf,zf
7622 */
7623FNIEMOP_DEF(iemOp_jbe_Jv)
7624{
7625 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7626 IEMOP_HLP_MIN_386();
7627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7629 {
7630 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7631 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7633 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7634 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7635 } IEM_MC_ELSE() {
7636 IEM_MC_ADVANCE_RIP_AND_FINISH();
7637 } IEM_MC_ENDIF();
7638 IEM_MC_END();
7639 }
7640 else
7641 {
7642 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7646 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7647 } IEM_MC_ELSE() {
7648 IEM_MC_ADVANCE_RIP_AND_FINISH();
7649 } IEM_MC_ENDIF();
7650 IEM_MC_END();
7651 }
7652}
7653
7654
7655/**
7656 * @opcode 0x87
7657 * @opfltest cf,zf
7658 */
7659FNIEMOP_DEF(iemOp_jnbe_Jv)
7660{
7661 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7662 IEMOP_HLP_MIN_386();
7663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7665 {
7666 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7667 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7669 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7670 IEM_MC_ADVANCE_RIP_AND_FINISH();
7671 } IEM_MC_ELSE() {
7672 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7673 } IEM_MC_ENDIF();
7674 IEM_MC_END();
7675 }
7676 else
7677 {
7678 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7681 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7682 IEM_MC_ADVANCE_RIP_AND_FINISH();
7683 } IEM_MC_ELSE() {
7684 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7685 } IEM_MC_ENDIF();
7686 IEM_MC_END();
7687 }
7688}
7689
7690
7691/**
7692 * @opcode 0x88
7693 * @opfltest sf
7694 */
7695FNIEMOP_DEF(iemOp_js_Jv)
7696{
7697 IEMOP_MNEMONIC(js_Jv, "js Jv");
7698 IEMOP_HLP_MIN_386();
7699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7701 {
7702 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7703 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7706 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7707 } IEM_MC_ELSE() {
7708 IEM_MC_ADVANCE_RIP_AND_FINISH();
7709 } IEM_MC_ENDIF();
7710 IEM_MC_END();
7711 }
7712 else
7713 {
7714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7718 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7719 } IEM_MC_ELSE() {
7720 IEM_MC_ADVANCE_RIP_AND_FINISH();
7721 } IEM_MC_ENDIF();
7722 IEM_MC_END();
7723 }
7724}
7725
7726
7727/**
7728 * @opcode 0x89
7729 * @opfltest sf
7730 */
7731FNIEMOP_DEF(iemOp_jns_Jv)
7732{
7733 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7734 IEMOP_HLP_MIN_386();
7735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7737 {
7738 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7739 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7742 IEM_MC_ADVANCE_RIP_AND_FINISH();
7743 } IEM_MC_ELSE() {
7744 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7745 } IEM_MC_ENDIF();
7746 IEM_MC_END();
7747 }
7748 else
7749 {
7750 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7754 IEM_MC_ADVANCE_RIP_AND_FINISH();
7755 } IEM_MC_ELSE() {
7756 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7757 } IEM_MC_ENDIF();
7758 IEM_MC_END();
7759 }
7760}
7761
7762
7763/**
7764 * @opcode 0x8a
7765 * @opfltest pf
7766 */
7767FNIEMOP_DEF(iemOp_jp_Jv)
7768{
7769 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7770 IEMOP_HLP_MIN_386();
7771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7773 {
7774 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7775 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7778 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7779 } IEM_MC_ELSE() {
7780 IEM_MC_ADVANCE_RIP_AND_FINISH();
7781 } IEM_MC_ENDIF();
7782 IEM_MC_END();
7783 }
7784 else
7785 {
7786 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7790 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7791 } IEM_MC_ELSE() {
7792 IEM_MC_ADVANCE_RIP_AND_FINISH();
7793 } IEM_MC_ENDIF();
7794 IEM_MC_END();
7795 }
7796}
7797
7798
7799/**
7800 * @opcode 0x8b
7801 * @opfltest pf
7802 */
7803FNIEMOP_DEF(iemOp_jnp_Jv)
7804{
7805 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7806 IEMOP_HLP_MIN_386();
7807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7809 {
7810 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7811 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7813 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7814 IEM_MC_ADVANCE_RIP_AND_FINISH();
7815 } IEM_MC_ELSE() {
7816 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7817 } IEM_MC_ENDIF();
7818 IEM_MC_END();
7819 }
7820 else
7821 {
7822 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 } IEM_MC_ELSE() {
7828 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7829 } IEM_MC_ENDIF();
7830 IEM_MC_END();
7831 }
7832}
7833
7834
7835/**
7836 * @opcode 0x8c
7837 * @opfltest sf,of
7838 */
7839FNIEMOP_DEF(iemOp_jl_Jv)
7840{
7841 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7842 IEMOP_HLP_MIN_386();
7843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7845 {
7846 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7847 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7849 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7850 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7851 } IEM_MC_ELSE() {
7852 IEM_MC_ADVANCE_RIP_AND_FINISH();
7853 } IEM_MC_ENDIF();
7854 IEM_MC_END();
7855 }
7856 else
7857 {
7858 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7861 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7862 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7863 } IEM_MC_ELSE() {
7864 IEM_MC_ADVANCE_RIP_AND_FINISH();
7865 } IEM_MC_ENDIF();
7866 IEM_MC_END();
7867 }
7868}
7869
7870
7871/**
7872 * @opcode 0x8d
7873 * @opfltest sf,of
7874 */
7875FNIEMOP_DEF(iemOp_jnl_Jv)
7876{
7877 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7878 IEMOP_HLP_MIN_386();
7879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7881 {
7882 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7883 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7885 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7886 IEM_MC_ADVANCE_RIP_AND_FINISH();
7887 } IEM_MC_ELSE() {
7888 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7889 } IEM_MC_ENDIF();
7890 IEM_MC_END();
7891 }
7892 else
7893 {
7894 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7898 IEM_MC_ADVANCE_RIP_AND_FINISH();
7899 } IEM_MC_ELSE() {
7900 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7901 } IEM_MC_ENDIF();
7902 IEM_MC_END();
7903 }
7904}
7905
7906
7907/**
7908 * @opcode 0x8e
7909 * @opfltest zf,sf,of
7910 */
7911FNIEMOP_DEF(iemOp_jle_Jv)
7912{
7913 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7914 IEMOP_HLP_MIN_386();
7915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7917 {
7918 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7919 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7921 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7922 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7923 } IEM_MC_ELSE() {
7924 IEM_MC_ADVANCE_RIP_AND_FINISH();
7925 } IEM_MC_ENDIF();
7926 IEM_MC_END();
7927 }
7928 else
7929 {
7930 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7933 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7934 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7935 } IEM_MC_ELSE() {
7936 IEM_MC_ADVANCE_RIP_AND_FINISH();
7937 } IEM_MC_ENDIF();
7938 IEM_MC_END();
7939 }
7940}
7941
7942
7943/**
7944 * @opcode 0x8f
7945 * @opfltest zf,sf,of
7946 */
7947FNIEMOP_DEF(iemOp_jnle_Jv)
7948{
7949 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7950 IEMOP_HLP_MIN_386();
7951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7953 {
7954 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7955 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7958 IEM_MC_ADVANCE_RIP_AND_FINISH();
7959 } IEM_MC_ELSE() {
7960 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7961 } IEM_MC_ENDIF();
7962 IEM_MC_END();
7963 }
7964 else
7965 {
7966 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7970 IEM_MC_ADVANCE_RIP_AND_FINISH();
7971 } IEM_MC_ELSE() {
7972 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7973 } IEM_MC_ENDIF();
7974 IEM_MC_END();
7975 }
7976}
7977
7978
7979/**
7980 * @opcode 0x90
7981 * @opfltest of
7982 */
7983FNIEMOP_DEF(iemOp_seto_Eb)
7984{
7985 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
7986 IEMOP_HLP_MIN_386();
7987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7988
7989 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7990 * any way. AMD says it's "unused", whatever that means. We're
7991 * ignoring for now. */
7992 if (IEM_IS_MODRM_REG_MODE(bRm))
7993 {
7994 /* register target */
7995 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7998 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7999 } IEM_MC_ELSE() {
8000 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8001 } IEM_MC_ENDIF();
8002 IEM_MC_ADVANCE_RIP_AND_FINISH();
8003 IEM_MC_END();
8004 }
8005 else
8006 {
8007 /* memory target */
8008 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8013 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8014 } IEM_MC_ELSE() {
8015 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8016 } IEM_MC_ENDIF();
8017 IEM_MC_ADVANCE_RIP_AND_FINISH();
8018 IEM_MC_END();
8019 }
8020}
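
#if 0 /* Illustrative sketch, not compiled: SETcc stores a plain 0/1 byte derived from
       * EFLAGS (simplified scalar form). The 0x0f 0x90..0x9f handlers below differ
       * only in the condition and its polarity; compound conditions such as setl
       * compare flag pairs instead of testing a single bit. */
static void setccSketch(uint32_t fEFlags, uint8_t *pbDst)
{
    /* seto (0x90): a single flag decides the stored byte. */
    *pbDst = (fEFlags & X86_EFL_OF) ? 1 : 0;
    /* setl (0x9c) style conditions compare flag pairs, e.g.:
     *   *pbDst = RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF) ? 1 : 0; */
}
#endif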
8021
8022
8023/**
8024 * @opcode 0x91
8025 * @opfltest of
8026 */
8027FNIEMOP_DEF(iemOp_setno_Eb)
8028{
8029 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8030 IEMOP_HLP_MIN_386();
8031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8032
8033 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8034 * any way. AMD says it's "unused", whatever that means. We're
8035 * ignoring for now. */
8036 if (IEM_IS_MODRM_REG_MODE(bRm))
8037 {
8038 /* register target */
8039 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8041 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8042 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8043 } IEM_MC_ELSE() {
8044 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8045 } IEM_MC_ENDIF();
8046 IEM_MC_ADVANCE_RIP_AND_FINISH();
8047 IEM_MC_END();
8048 }
8049 else
8050 {
8051 /* memory target */
8052 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8056 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8057 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8058 } IEM_MC_ELSE() {
8059 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8060 } IEM_MC_ENDIF();
8061 IEM_MC_ADVANCE_RIP_AND_FINISH();
8062 IEM_MC_END();
8063 }
8064}
8065
8066
8067/**
8068 * @opcode 0x92
8069 * @opfltest cf
8070 */
8071FNIEMOP_DEF(iemOp_setc_Eb)
8072{
8073 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8074 IEMOP_HLP_MIN_386();
8075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8076
8077 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8078 * any way. AMD says it's "unused", whatever that means. We're
8079 * ignoring for now. */
8080 if (IEM_IS_MODRM_REG_MODE(bRm))
8081 {
8082 /* register target */
8083 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8085 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8086 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8087 } IEM_MC_ELSE() {
8088 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8089 } IEM_MC_ENDIF();
8090 IEM_MC_ADVANCE_RIP_AND_FINISH();
8091 IEM_MC_END();
8092 }
8093 else
8094 {
8095 /* memory target */
8096 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8100 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8101 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8102 } IEM_MC_ELSE() {
8103 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8104 } IEM_MC_ENDIF();
8105 IEM_MC_ADVANCE_RIP_AND_FINISH();
8106 IEM_MC_END();
8107 }
8108}
8109
8110
8111/**
8112 * @opcode 0x93
8113 * @opfltest cf
8114 */
8115FNIEMOP_DEF(iemOp_setnc_Eb)
8116{
8117 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8118 IEMOP_HLP_MIN_386();
8119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8120
8121 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8122 * any way. AMD says it's "unused", whatever that means. We're
8123 * ignoring for now. */
8124 if (IEM_IS_MODRM_REG_MODE(bRm))
8125 {
8126 /* register target */
8127 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8129 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8130 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8131 } IEM_MC_ELSE() {
8132 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8133 } IEM_MC_ENDIF();
8134 IEM_MC_ADVANCE_RIP_AND_FINISH();
8135 IEM_MC_END();
8136 }
8137 else
8138 {
8139 /* memory target */
8140 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8144 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8145 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8146 } IEM_MC_ELSE() {
8147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8148 } IEM_MC_ENDIF();
8149 IEM_MC_ADVANCE_RIP_AND_FINISH();
8150 IEM_MC_END();
8151 }
8152}
8153
8154
8155/**
8156 * @opcode 0x94
8157 * @opfltest zf
8158 */
8159FNIEMOP_DEF(iemOp_sete_Eb)
8160{
8161 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8162 IEMOP_HLP_MIN_386();
8163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8164
8165 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8166 * any way. AMD says it's "unused", whatever that means. We're
8167 * ignoring for now. */
8168 if (IEM_IS_MODRM_REG_MODE(bRm))
8169 {
8170 /* register target */
8171 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8173 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8174 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8175 } IEM_MC_ELSE() {
8176 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8177 } IEM_MC_ENDIF();
8178 IEM_MC_ADVANCE_RIP_AND_FINISH();
8179 IEM_MC_END();
8180 }
8181 else
8182 {
8183 /* memory target */
8184 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8188 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8189 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8190 } IEM_MC_ELSE() {
8191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8192 } IEM_MC_ENDIF();
8193 IEM_MC_ADVANCE_RIP_AND_FINISH();
8194 IEM_MC_END();
8195 }
8196}
8197
8198
8199/**
8200 * @opcode 0x95
8201 * @opfltest zf
8202 */
8203FNIEMOP_DEF(iemOp_setne_Eb)
8204{
8205 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8206 IEMOP_HLP_MIN_386();
8207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8208
8209 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8210 * any way. AMD says it's "unused", whatever that means. We're
8211 * ignoring for now. */
8212 if (IEM_IS_MODRM_REG_MODE(bRm))
8213 {
8214 /* register target */
8215 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8218 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8219 } IEM_MC_ELSE() {
8220 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8221 } IEM_MC_ENDIF();
8222 IEM_MC_ADVANCE_RIP_AND_FINISH();
8223 IEM_MC_END();
8224 }
8225 else
8226 {
8227 /* memory target */
8228 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8232 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8233 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8234 } IEM_MC_ELSE() {
8235 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8236 } IEM_MC_ENDIF();
8237 IEM_MC_ADVANCE_RIP_AND_FINISH();
8238 IEM_MC_END();
8239 }
8240}
8241
8242
8243/**
8244 * @opcode 0x96
8245 * @opfltest cf,zf
8246 */
8247FNIEMOP_DEF(iemOp_setbe_Eb)
8248{
8249 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8250 IEMOP_HLP_MIN_386();
8251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8252
8253 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8254 * any way. AMD says it's "unused", whatever that means. We're
8255 * ignoring for now. */
8256 if (IEM_IS_MODRM_REG_MODE(bRm))
8257 {
8258 /* register target */
8259 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8261 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8262 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8263 } IEM_MC_ELSE() {
8264 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8265 } IEM_MC_ENDIF();
8266 IEM_MC_ADVANCE_RIP_AND_FINISH();
8267 IEM_MC_END();
8268 }
8269 else
8270 {
8271 /* memory target */
8272 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8277 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8278 } IEM_MC_ELSE() {
8279 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8280 } IEM_MC_ENDIF();
8281 IEM_MC_ADVANCE_RIP_AND_FINISH();
8282 IEM_MC_END();
8283 }
8284}
8285
8286
8287/**
8288 * @opcode 0x97
8289 * @opfltest cf,zf
8290 */
8291FNIEMOP_DEF(iemOp_setnbe_Eb)
8292{
8293 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8294 IEMOP_HLP_MIN_386();
8295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8296
8297 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8298 * any way. AMD says it's "unused", whatever that means. We're
8299 * ignoring for now. */
8300 if (IEM_IS_MODRM_REG_MODE(bRm))
8301 {
8302 /* register target */
8303 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8305 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8306 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8307 } IEM_MC_ELSE() {
8308 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8309 } IEM_MC_ENDIF();
8310 IEM_MC_ADVANCE_RIP_AND_FINISH();
8311 IEM_MC_END();
8312 }
8313 else
8314 {
8315 /* memory target */
8316 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8320 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8321 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8322 } IEM_MC_ELSE() {
8323 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8324 } IEM_MC_ENDIF();
8325 IEM_MC_ADVANCE_RIP_AND_FINISH();
8326 IEM_MC_END();
8327 }
8328}
8329
8330
8331/**
8332 * @opcode 0x98
8333 * @opfltest sf
8334 */
8335FNIEMOP_DEF(iemOp_sets_Eb)
8336{
8337 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8338 IEMOP_HLP_MIN_386();
8339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8340
8341 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8342 * any way. AMD says it's "unused", whatever that means. We're
8343 * ignoring for now. */
8344 if (IEM_IS_MODRM_REG_MODE(bRm))
8345 {
8346 /* register target */
8347 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8349 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8350 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8351 } IEM_MC_ELSE() {
8352 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8353 } IEM_MC_ENDIF();
8354 IEM_MC_ADVANCE_RIP_AND_FINISH();
8355 IEM_MC_END();
8356 }
8357 else
8358 {
8359 /* memory target */
8360 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8364 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8365 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8366 } IEM_MC_ELSE() {
8367 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8368 } IEM_MC_ENDIF();
8369 IEM_MC_ADVANCE_RIP_AND_FINISH();
8370 IEM_MC_END();
8371 }
8372}
8373
8374
8375/**
8376 * @opcode 0x99
8377 * @opfltest sf
8378 */
8379FNIEMOP_DEF(iemOp_setns_Eb)
8380{
8381 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8382 IEMOP_HLP_MIN_386();
8383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8384
8385 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8386 * any way. AMD says it's "unused", whatever that means. We're
8387 * ignoring for now. */
8388 if (IEM_IS_MODRM_REG_MODE(bRm))
8389 {
8390 /* register target */
8391 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8394 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8395 } IEM_MC_ELSE() {
8396 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8397 } IEM_MC_ENDIF();
8398 IEM_MC_ADVANCE_RIP_AND_FINISH();
8399 IEM_MC_END();
8400 }
8401 else
8402 {
8403 /* memory target */
8404 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8408 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8409 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8410 } IEM_MC_ELSE() {
8411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8412 } IEM_MC_ENDIF();
8413 IEM_MC_ADVANCE_RIP_AND_FINISH();
8414 IEM_MC_END();
8415 }
8416}
8417
8418
8419/**
8420 * @opcode 0x9a
8421 * @opfltest pf
8422 */
8423FNIEMOP_DEF(iemOp_setp_Eb)
8424{
8425 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8426 IEMOP_HLP_MIN_386();
8427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8428
8429 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8430 * any way. AMD says it's "unused", whatever that means. We're
8431 * ignoring for now. */
8432 if (IEM_IS_MODRM_REG_MODE(bRm))
8433 {
8434 /* register target */
8435 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8438 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8439 } IEM_MC_ELSE() {
8440 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8441 } IEM_MC_ENDIF();
8442 IEM_MC_ADVANCE_RIP_AND_FINISH();
8443 IEM_MC_END();
8444 }
8445 else
8446 {
8447 /* memory target */
8448 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8452 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8453 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8454 } IEM_MC_ELSE() {
8455 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8456 } IEM_MC_ENDIF();
8457 IEM_MC_ADVANCE_RIP_AND_FINISH();
8458 IEM_MC_END();
8459 }
8460}
8461
8462
8463/**
8464 * @opcode 0x9b
8465 * @opfltest pf
8466 */
8467FNIEMOP_DEF(iemOp_setnp_Eb)
8468{
8469 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8470 IEMOP_HLP_MIN_386();
8471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8472
8473 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8474 * any way. AMD says it's "unused", whatever that means. We're
8475 * ignoring for now. */
8476 if (IEM_IS_MODRM_REG_MODE(bRm))
8477 {
8478 /* register target */
8479 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8482 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8483 } IEM_MC_ELSE() {
8484 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8485 } IEM_MC_ENDIF();
8486 IEM_MC_ADVANCE_RIP_AND_FINISH();
8487 IEM_MC_END();
8488 }
8489 else
8490 {
8491 /* memory target */
8492 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8496 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8497 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8498 } IEM_MC_ELSE() {
8499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8500 } IEM_MC_ENDIF();
8501 IEM_MC_ADVANCE_RIP_AND_FINISH();
8502 IEM_MC_END();
8503 }
8504}
8505
8506
8507/**
8508 * @opcode 0x9c
8509 * @opfltest sf,of
8510 */
8511FNIEMOP_DEF(iemOp_setl_Eb)
8512{
8513 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8514 IEMOP_HLP_MIN_386();
8515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8516
8517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8518 * any way. AMD says it's "unused", whatever that means. We're
8519 * ignoring for now. */
8520 if (IEM_IS_MODRM_REG_MODE(bRm))
8521 {
8522 /* register target */
8523 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8525 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8526 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8527 } IEM_MC_ELSE() {
8528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8529 } IEM_MC_ENDIF();
8530 IEM_MC_ADVANCE_RIP_AND_FINISH();
8531 IEM_MC_END();
8532 }
8533 else
8534 {
8535 /* memory target */
8536 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8540 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8542 } IEM_MC_ELSE() {
8543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8544 } IEM_MC_ENDIF();
8545 IEM_MC_ADVANCE_RIP_AND_FINISH();
8546 IEM_MC_END();
8547 }
8548}
8549
8550
8551/**
8552 * @opcode 0x9d
8553 * @opfltest sf,of
8554 */
8555FNIEMOP_DEF(iemOp_setnl_Eb)
8556{
8557 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8558 IEMOP_HLP_MIN_386();
8559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8560
8561 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8562 * any way. AMD says it's "unused", whatever that means. We're
8563 * ignoring for now. */
8564 if (IEM_IS_MODRM_REG_MODE(bRm))
8565 {
8566 /* register target */
8567 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8569 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8570 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8571 } IEM_MC_ELSE() {
8572 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8573 } IEM_MC_ENDIF();
8574 IEM_MC_ADVANCE_RIP_AND_FINISH();
8575 IEM_MC_END();
8576 }
8577 else
8578 {
8579 /* memory target */
8580 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8584 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8586 } IEM_MC_ELSE() {
8587 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8588 } IEM_MC_ENDIF();
8589 IEM_MC_ADVANCE_RIP_AND_FINISH();
8590 IEM_MC_END();
8591 }
8592}
8593
8594
8595/**
8596 * @opcode 0x9e
8597 * @opfltest zf,sf,of
8598 */
8599FNIEMOP_DEF(iemOp_setle_Eb)
8600{
8601 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8602 IEMOP_HLP_MIN_386();
8603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8604
8605 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8606 * any way. AMD says it's "unused", whatever that means. We're
8607 * ignoring for now. */
8608 if (IEM_IS_MODRM_REG_MODE(bRm))
8609 {
8610 /* register target */
8611 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8614 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8615 } IEM_MC_ELSE() {
8616 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8617 } IEM_MC_ENDIF();
8618 IEM_MC_ADVANCE_RIP_AND_FINISH();
8619 IEM_MC_END();
8620 }
8621 else
8622 {
8623 /* memory target */
8624 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8628 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8629 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8630 } IEM_MC_ELSE() {
8631 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8632 } IEM_MC_ENDIF();
8633 IEM_MC_ADVANCE_RIP_AND_FINISH();
8634 IEM_MC_END();
8635 }
8636}
8637
8638
8639/**
8640 * @opcode 0x9f
8641 * @opfltest zf,sf,of
8642 */
8643FNIEMOP_DEF(iemOp_setnle_Eb)
8644{
8645 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8646 IEMOP_HLP_MIN_386();
8647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8648
8649 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8650 * any way. AMD says it's "unused", whatever that means. We're
8651 * ignoring for now. */
8652 if (IEM_IS_MODRM_REG_MODE(bRm))
8653 {
8654 /* register target */
8655 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8657 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8658 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8659 } IEM_MC_ELSE() {
8660 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8661 } IEM_MC_ENDIF();
8662 IEM_MC_ADVANCE_RIP_AND_FINISH();
8663 IEM_MC_END();
8664 }
8665 else
8666 {
8667 /* memory target */
8668 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8672 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8673 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8674 } IEM_MC_ELSE() {
8675 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8676 } IEM_MC_ENDIF();
8677 IEM_MC_ADVANCE_RIP_AND_FINISH();
8678 IEM_MC_END();
8679 }
8680}
8681
8682
8683/** Opcode 0x0f 0xa0. */
8684FNIEMOP_DEF(iemOp_push_fs)
8685{
8686 IEMOP_MNEMONIC(push_fs, "push fs");
8687 IEMOP_HLP_MIN_386();
8688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8689 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8690}
8691
8692
8693/** Opcode 0x0f 0xa1. */
8694FNIEMOP_DEF(iemOp_pop_fs)
8695{
8696 IEMOP_MNEMONIC(pop_fs, "pop fs");
8697 IEMOP_HLP_MIN_386();
8698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8700 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8702 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8703 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8704 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8705 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8706 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8707}
8708
8709
8710/** Opcode 0x0f 0xa2. */
8711FNIEMOP_DEF(iemOp_cpuid)
8712{
8713 IEMOP_MNEMONIC(cpuid, "cpuid");
8714 IEMOP_HLP_MIN_486(); /* not all 486es. */
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8717 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8718 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8719 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8720 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8721 iemCImpl_cpuid);
8722}
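
/* Note on the IEM_MC_DEFER_TO_CIMPL_*_RET invocations above: the
 * RT_BIT_64(kIemNativeGstReg_*) mask names the guest registers the deferred C
 * implementation may modify, so the native recompiler can flush its shadowed
 * register copies first (e.g. cpuid clobbers eax, ebx, ecx and edx; pop fs
 * touches rsp and the whole FS segment register). */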
8723
8724
8725/**
8726 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8727 * iemOp_bts_Ev_Gv.
8728 */
8729
8730#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8732 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8733 \
8734 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8735 { \
8736 /* register destination. */ \
8737 switch (pVCpu->iem.s.enmEffOpSize) \
8738 { \
8739 case IEMMODE_16BIT: \
8740 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8742 \
8743 IEM_MC_ARG(uint16_t, u16Src, 2); \
8744 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8745 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8746 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8747 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8748 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8749 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8750 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8751 \
8752 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8753 IEM_MC_END(); \
8754 break; \
8755 \
8756 case IEMMODE_32BIT: \
8757 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8759 \
8760 IEM_MC_ARG(uint32_t, u32Src, 2); \
8761 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8762 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8763 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8764 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8765 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8766 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8767 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8768 \
8769 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8770 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8771 IEM_MC_END(); \
8772 break; \
8773 \
8774 case IEMMODE_64BIT: \
8775 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8777 \
8778 IEM_MC_ARG(uint64_t, u64Src, 2); \
8779 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8780 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8781 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8782 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8783 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8784 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8785 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8786 \
8787 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8788 IEM_MC_END(); \
8789 break; \
8790 \
8791 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8792 } \
8793 } \
8794 else \
8795 { \
8796 /* memory destination. */ \
8797 /** @todo test negative bit offsets! */ \
8798 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8799 { \
8800 switch (pVCpu->iem.s.enmEffOpSize) \
8801 { \
8802 case IEMMODE_16BIT: \
8803 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8806 IEMOP_HLP_DONE_DECODING(); \
8807 \
8808 IEM_MC_ARG(uint16_t, u16Src, 2); \
8809 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8810 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8811 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8812 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8813 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8814 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8815 \
8816 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8817 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8818 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8819 \
8820 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8821 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8822 \
8823 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8824 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8825 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8826 IEM_MC_END(); \
8827 break; \
8828 \
8829 case IEMMODE_32BIT: \
8830 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8833 IEMOP_HLP_DONE_DECODING(); \
8834 \
8835 IEM_MC_ARG(uint32_t, u32Src, 2); \
8836 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8837 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8838 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8839 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8840 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8841 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8842 \
8843 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8844 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8845 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8846 \
8847 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8848 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8849 \
8850 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8851 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8852 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8853 IEM_MC_END(); \
8854 break; \
8855 \
8856 case IEMMODE_64BIT: \
8857 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8860 IEMOP_HLP_DONE_DECODING(); \
8861 \
8862 IEM_MC_ARG(uint64_t, u64Src, 2); \
8863 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8864 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
8865 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
8866 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
8867 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
8868 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
8869 \
8870 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8871 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8872 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8873 \
8874 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8875 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8876 \
8877 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8878 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8880 IEM_MC_END(); \
8881 break; \
8882 \
8883 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8884 } \
8885 } \
8886 else \
8887 { \
8888 (void)0
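/* Note! IEMOP_BODY_BIT_Ev_Gv_RW above deliberately ends inside an open 'else'
   block; IEMOP_BODY_BIT_Ev_Gv_LOCKED below supplies the LOCK-prefixed body and
   the closing braces, so the two must always be used back to back (see
   iemOp_bts_Ev_Gv). */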
8889/* Separate macro to work around parsing issue in IEMAllInstPython.py */
8890#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
8891 switch (pVCpu->iem.s.enmEffOpSize) \
8892 { \
8893 case IEMMODE_16BIT: \
8894 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8897 IEMOP_HLP_DONE_DECODING(); \
8898 \
8899 IEM_MC_ARG(uint16_t, u16Src, 2); \
8900 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8901 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8902 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8903 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8904 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8905 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8906 \
8907 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8908 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8909 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8910 \
8911 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8912 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
8913 \
8914 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
8915 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8916 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8917 IEM_MC_END(); \
8918 break; \
8919 \
8920 case IEMMODE_32BIT: \
8921 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8924 IEMOP_HLP_DONE_DECODING(); \
8925 \
8926 IEM_MC_ARG(uint32_t, u32Src, 2); \
8927 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8928 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8929 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8930 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8931 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8932 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8933 \
8934 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8935 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8936 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8937 \
8938 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8939 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
8940 \
8941 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
8942 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8943 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8944 IEM_MC_END(); \
8945 break; \
8946 \
8947 case IEMMODE_64BIT: \
8948 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8951 IEMOP_HLP_DONE_DECODING(); \
8952 \
8953 IEM_MC_ARG(uint64_t, u64Src, 2); \
8954 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8955 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
8956 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
8957 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
8958 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
8959 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
8960 \
8961 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8962 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8963 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8964 \
8965 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8966 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
8967 \
8968 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
8969 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8970 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8971 IEM_MC_END(); \
8972 break; \
8973 \
8974 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8975 } \
8976 } \
8977 } \
8978 (void)0
8979
8980/* Read-only version (bt). */
8981#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8983 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8984 \
8985 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8986 { \
8987 /* register destination. */ \
8988 switch (pVCpu->iem.s.enmEffOpSize) \
8989 { \
8990 case IEMMODE_16BIT: \
8991 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8993 \
8994 IEM_MC_ARG(uint16_t, u16Src, 2); \
8995 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8996 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8997 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
8998 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8999 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9000 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9001 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9002 \
9003 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9004 IEM_MC_END(); \
9005 break; \
9006 \
9007 case IEMMODE_32BIT: \
9008 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9010 \
9011 IEM_MC_ARG(uint32_t, u32Src, 2); \
9012 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9013 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9014 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9015 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9016 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9017 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9018 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9019 \
9020 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9021 IEM_MC_END(); \
9022 break; \
9023 \
9024 case IEMMODE_64BIT: \
9025 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9027 \
9028 IEM_MC_ARG(uint64_t, u64Src, 2); \
9029 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9030 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9031 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9032 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9033 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9034 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9035 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9036 \
9037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9038 IEM_MC_END(); \
9039 break; \
9040 \
9041 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9042 } \
9043 } \
9044 else \
9045 { \
9046 /* memory destination. */ \
9047 /** @todo test negative bit offsets! */ \
9048 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9049 { \
9050 switch (pVCpu->iem.s.enmEffOpSize) \
9051 { \
9052 case IEMMODE_16BIT: \
9053 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9056 IEMOP_HLP_DONE_DECODING(); \
9057 \
9058 IEM_MC_ARG(uint16_t, u16Src, 2); \
9059 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9060 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9061 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9062 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9063 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9064 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9065 \
9066 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9067 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9068 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9069 \
9070 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9071 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9072 \
9073 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9074 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9075 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9076 IEM_MC_END(); \
9077 break; \
9078 \
9079 case IEMMODE_32BIT: \
9080 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9083 IEMOP_HLP_DONE_DECODING(); \
9084 \
9085 IEM_MC_ARG(uint32_t, u32Src, 2); \
9086 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9087 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9088 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9089 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9090 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9091 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9092 \
9093 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9094 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9095 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9096 \
9097 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9098 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9099 \
9100 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9101 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9102 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9103 IEM_MC_END(); \
9104 break; \
9105 \
9106 case IEMMODE_64BIT: \
9107 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9110 IEMOP_HLP_DONE_DECODING(); \
9111 \
9112 IEM_MC_ARG(uint64_t, u64Src, 2); \
9113 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9114 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9115 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9116 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9117 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9118 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9119 \
9120 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9121 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9122 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9123 \
9124 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9125 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9126 \
9127 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9128 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9129 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9130 IEM_MC_END(); \
9131 break; \
9132 \
9133 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9134 } \
9135 } \
9136 else \
9137 { \
9138 IEMOP_HLP_DONE_DECODING(); \
9139 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9140 } \
9141 } \
9142 (void)0
9143
9144
9145/**
9146 * @opcode 0xa3
9147 * @oppfx n/a
9148 * @opflclass bitmap
9149 */
9150FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9151{
9152 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9153 IEMOP_HLP_MIN_386();
9154 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9155}
9156
9157
9158/**
9159 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
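 *
 * SHLD shifts the destination left, filling the vacated low-order bits from
 * the top of the source; SHRD shifts right, filling the vacated high-order
 * bits from the bottom of the source.  E.g. with AX=0x1234 and BX=0xabcd,
 * 'shld ax, bx, 4' leaves AX=0x234a and 'shrd ax, bx, 4' leaves AX=0xd123.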
9160 */
9161#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(a_pImplExpr) \
9162 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9163 \
9164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9165 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9166 \
9167 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9168 { \
9169 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9170 \
9171 switch (pVCpu->iem.s.enmEffOpSize) \
9172 { \
9173 case IEMMODE_16BIT: \
9174 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9176 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9177 IEM_MC_ARG(uint16_t, u16Src, 1); \
9178 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9179 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9180 \
9181 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9182 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9183 IEM_MC_REF_EFLAGS(pEFlags); \
9184 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9185 \
9186 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9187 IEM_MC_END(); \
9188 break; \
9189 \
9190 case IEMMODE_32BIT: \
9191 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9193 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9194 IEM_MC_ARG(uint32_t, u32Src, 1); \
9195 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9196 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9197 \
9198 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9199 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9200 IEM_MC_REF_EFLAGS(pEFlags); \
9201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9202 \
9203 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9204 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9205 IEM_MC_END(); \
9206 break; \
9207 \
9208 case IEMMODE_64BIT: \
9209 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9211 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9212 IEM_MC_ARG(uint64_t, u64Src, 1); \
9213 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9214 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9215 \
9216 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9217 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9218 IEM_MC_REF_EFLAGS(pEFlags); \
9219 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9220 \
9221 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9222 IEM_MC_END(); \
9223 break; \
9224 \
9225 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9226 } \
9227 } \
9228 else \
9229 { \
9230 switch (pVCpu->iem.s.enmEffOpSize) \
9231 { \
9232 case IEMMODE_16BIT: \
9233 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9236 \
9237 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9239 \
9240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9241 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9242 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9243 \
9244 IEM_MC_ARG(uint16_t, u16Src, 1); \
9245 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9246 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9247 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9248 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9249 \
9250 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9251 IEM_MC_COMMIT_EFLAGS(EFlags); \
9252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9253 IEM_MC_END(); \
9254 break; \
9255 \
9256 case IEMMODE_32BIT: \
9257 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9260 \
9261 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9263 \
9264 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9265 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9266 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9267 \
9268 IEM_MC_ARG(uint32_t, u32Src, 1); \
9269 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9270 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9272 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9273 \
9274 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9275 IEM_MC_COMMIT_EFLAGS(EFlags); \
9276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9277 IEM_MC_END(); \
9278 break; \
9279 \
9280 case IEMMODE_64BIT: \
9281 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9284 \
9285 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9287 \
9288 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9289 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9290 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9291 \
9292 IEM_MC_ARG(uint64_t, u64Src, 1); \
9293 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9294 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9295 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9296 \
9297 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9298 \
9299 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9300 IEM_MC_COMMIT_EFLAGS(EFlags); \
9301 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9302 IEM_MC_END(); \
9303 break; \
9304 \
9305 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9306 } \
9307 } (void)0
9308
9309
9310/**
9311 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
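 *
 * Same as the immediate variant above, except the shift count comes from CL;
 * masking of the count (mod 32, or mod 64 for 64-bit operands) is left to the
 * a_pImplExpr worker.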
9312 */
9313#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9314 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9315 \
9316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9318 \
9319 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9320 { \
9321 switch (pVCpu->iem.s.enmEffOpSize) \
9322 { \
9323 case IEMMODE_16BIT: \
9324 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9326 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9327 IEM_MC_ARG(uint16_t, u16Src, 1); \
9328 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9329 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9330 \
9331 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9332 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9333 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9334 IEM_MC_REF_EFLAGS(pEFlags); \
9335 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9336 \
9337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9338 IEM_MC_END(); \
9339 break; \
9340 \
9341 case IEMMODE_32BIT: \
9342 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9344 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9345 IEM_MC_ARG(uint32_t, u32Src, 1); \
9346 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9347 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9348 \
9349 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9350 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9351 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9352 IEM_MC_REF_EFLAGS(pEFlags); \
9353 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9354 \
9355 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9356 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9357 IEM_MC_END(); \
9358 break; \
9359 \
9360 case IEMMODE_64BIT: \
9361 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9363 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9364 IEM_MC_ARG(uint64_t, u64Src, 1); \
9365 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9366 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9367 \
9368 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9369 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9370 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9371 IEM_MC_REF_EFLAGS(pEFlags); \
9372 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9373 \
9374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9375 IEM_MC_END(); \
9376 break; \
9377 \
9378 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9379 } \
9380 } \
9381 else \
9382 { \
9383 switch (pVCpu->iem.s.enmEffOpSize) \
9384 { \
9385 case IEMMODE_16BIT: \
9386 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9387 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9388 IEM_MC_ARG(uint16_t, u16Src, 1); \
9389 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9391 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9392 \
9393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9395 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9396 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9397 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9398 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9399 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9400 \
9401 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9402 IEM_MC_COMMIT_EFLAGS(EFlags); \
9403 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9404 IEM_MC_END(); \
9405 break; \
9406 \
9407 case IEMMODE_32BIT: \
9408 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9409 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9410 IEM_MC_ARG(uint32_t, u32Src, 1); \
9411 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9413 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9414 \
9415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9417 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9418 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9419 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9420 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9421 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9422 \
9423 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9424 IEM_MC_COMMIT_EFLAGS(EFlags); \
9425 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9426 IEM_MC_END(); \
9427 break; \
9428 \
9429 case IEMMODE_64BIT: \
9430 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9431 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9432 IEM_MC_ARG(uint64_t, u64Src, 1); \
9433 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9435 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9436 \
9437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9439 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9440 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9441 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9442 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9443 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9444 \
9445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9446 IEM_MC_COMMIT_EFLAGS(EFlags); \
9447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9448 IEM_MC_END(); \
9449 break; \
9450 \
9451 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9452 } \
9453 } (void)0
9454
9455
9456/**
9457 * @opcode 0xa4
9458 * @opflclass shift_count
9459 */
9460FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9461{
9462 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9463 IEMOP_HLP_MIN_386();
9464 IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9465}
9466
9467
9468/**
9469 * @opcode 0xa5
9470 * @opflclass shift_count
9471 */
9472FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9473{
9474 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9475 IEMOP_HLP_MIN_386();
9476 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9477}
9478
9479
9480/** Opcode 0x0f 0xa8. */
9481FNIEMOP_DEF(iemOp_push_gs)
9482{
9483 IEMOP_MNEMONIC(push_gs, "push gs");
9484 IEMOP_HLP_MIN_386();
9485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9486 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9487}
9488
9489
9490/** Opcode 0x0f 0xa9. */
9491FNIEMOP_DEF(iemOp_pop_gs)
9492{
9493 IEMOP_MNEMONIC(pop_gs, "pop gs");
9494 IEMOP_HLP_MIN_386();
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
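    /* The second argument is the mask of guest registers the call may modify,
       telling the native recompiler what to flush: RSP (for the pop itself)
       plus all the hidden parts of GS. */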
9497 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9498 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9499 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9500 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9501 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9502 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9503 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9504}
9505
9506
9507/** Opcode 0x0f 0xaa. */
9508FNIEMOP_DEF(iemOp_rsm)
9509{
9510 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9511 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9513 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9514 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9515 iemCImpl_rsm);
9516}
9517
9518
9519
9520/**
9521 * @opcode 0xab
9522 * @oppfx n/a
9523 * @opflclass bitmap
9524 */
9525FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9526{
9527 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9528 IEMOP_HLP_MIN_386();
9529 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9530 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9531}
9532
9533
9534/**
9535 * @opcode 0xac
9536 * @opflclass shift_count
9537 */
9538FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9539{
9540 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9541 IEMOP_HLP_MIN_386();
9542 IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9543}
9544
9545
9546/**
9547 * @opcode 0xad
9548 * @opflclass shift_count
9549 */
9550FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9551{
9552 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9553 IEMOP_HLP_MIN_386();
9554 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9555}
9556
9557
9558/** Opcode 0x0f 0xae mem/0. */
9559FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9560{
9561 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9562 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9563 IEMOP_RAISE_INVALID_OPCODE_RET();
9564
9565 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9566 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9569 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9570 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9571 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9572 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9573 IEM_MC_END();
9574}
9575
9576
9577/** Opcode 0x0f 0xae mem/1. */
9578FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9579{
9580 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9581 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9582 IEMOP_RAISE_INVALID_OPCODE_RET();
9583
9584 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9585 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9588 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9589 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9590 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9591 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9592 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9593 IEM_MC_END();
9594}
9595
9596
9597/**
9598 * @opmaps grp15
9599 * @opcode !11/2
9600 * @oppfx none
9601 * @opcpuid sse
9602 * @opgroup og_sse_mxcsrsm
9603 * @opxcpttype 5
9604 * @optest op1=0 -> mxcsr=0
9605 * @optest op1=0x2083 -> mxcsr=0x2083
9606 * @optest op1=0xfffffffe -> value.xcpt=0xd
9607 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9608 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9609 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9610 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9611 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9612 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9613 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9614 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9615 */
9616FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9617{
9618 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9619 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9620 IEMOP_RAISE_INVALID_OPCODE_RET();
9621
9622 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9623 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9626 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9627 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9628 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9629 IEM_MC_END();
9630}
9631
9632
9633/**
9634 * @opmaps grp15
9635 * @opcode !11/3
9636 * @oppfx none
9637 * @opcpuid sse
9638 * @opgroup og_sse_mxcsrsm
9639 * @opxcpttype 5
9640 * @optest mxcsr=0 -> op1=0
9641 * @optest mxcsr=0x2083 -> op1=0x2083
9642 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9643 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9644 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9645 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9646 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9647 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9648 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9649 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9650 */
9651FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9652{
9653 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9654 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9655 IEMOP_RAISE_INVALID_OPCODE_RET();
9656
9657 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9658 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9662 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9663 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9664 IEM_MC_END();
9665}
9666
9667
9668/**
9669 * @opmaps grp15
9670 * @opcode !11/4
9671 * @oppfx none
9672 * @opcpuid xsave
9673 * @opgroup og_system
9674 * @opxcpttype none
9675 */
9676FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9677{
9678 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9679 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9680 IEMOP_RAISE_INVALID_OPCODE_RET();
9681
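    /* The effective operand size is forwarded so the cImpl can tell the
       XSAVE and XSAVE64 (REX.W) forms apart. */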
9682 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9683 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9686 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9687 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9688 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9689 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9690 IEM_MC_END();
9691}
9692
9693
9694/**
9695 * @opmaps grp15
9696 * @opcode !11/5
9697 * @oppfx none
9698 * @opcpuid xsave
9699 * @opgroup og_system
9700 * @opxcpttype none
9701 */
9702FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9703{
9704 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9705 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9706 IEMOP_RAISE_INVALID_OPCODE_RET();
9707
9708 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9709 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9713 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9714 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9715 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9716 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9717 IEM_MC_END();
9718}
9719
9720/** Opcode 0x0f 0xae mem/6. */
9721FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9722
9723/**
9724 * @opmaps grp15
9725 * @opcode !11/7
9726 * @oppfx none
9727 * @opcpuid clfsh
9728 * @opgroup og_cachectl
9729 * @optest op1=1 ->
9730 */
9731FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9732{
9733 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9734 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9735 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9736
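    /* clflush and clflushopt differ only in their memory ordering guarantees,
       which do not matter to the emulation, so both forward to the same cImpl
       worker. */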
9737 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9738 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9741 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9742 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9743 IEM_MC_END();
9744}
9745
9746/**
9747 * @opmaps grp15
9748 * @opcode !11/7
9749 * @oppfx 0x66
9750 * @opcpuid clflushopt
9751 * @opgroup og_cachectl
9752 * @optest op1=1 ->
9753 */
9754FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9755{
9756 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9757 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9758 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9759
9760 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9761 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9764 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9765 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9766 IEM_MC_END();
9767}
9768
9769
9770/** Opcode 0x0f 0xae 11b/5. */
9771FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9772{
9773 RT_NOREF_PV(bRm);
9774 IEMOP_MNEMONIC(lfence, "lfence");
9775 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
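    /* On x86 hosts the real LFENCE helper requires SSE2; older hosts get an
       alternative memory fence instead.  ARM64 hosts always have a native
       helper.  The same pattern is used for mfence and sfence below. */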
9777#ifdef RT_ARCH_ARM64
9778 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9779#else
9780 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9781 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9782 else
9783 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9784#endif
9785 IEM_MC_ADVANCE_RIP_AND_FINISH();
9786 IEM_MC_END();
9787}
9788
9789
9790/** Opcode 0x0f 0xae 11b/6. */
9791FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9792{
9793 RT_NOREF_PV(bRm);
9794 IEMOP_MNEMONIC(mfence, "mfence");
9795 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9797#ifdef RT_ARCH_ARM64
9798 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9799#else
9800 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9801 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9802 else
9803 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9804#endif
9805 IEM_MC_ADVANCE_RIP_AND_FINISH();
9806 IEM_MC_END();
9807}
9808
9809
9810/** Opcode 0x0f 0xae 11b/7. */
9811FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9812{
9813 RT_NOREF_PV(bRm);
9814 IEMOP_MNEMONIC(sfence, "sfence");
9815 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9817#ifdef RT_ARCH_ARM64
9818 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9819#else
9820 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9821 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9822 else
9823 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9824#endif
9825 IEM_MC_ADVANCE_RIP_AND_FINISH();
9826 IEM_MC_END();
9827}
9828
9829
9830/** Opcode 0xf3 0x0f 0xae 11b/0. */
9831FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9832{
9833 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9834 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9835 {
9836 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9838 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9839 IEM_MC_LOCAL(uint64_t, u64Dst);
9840 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9841 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9842 IEM_MC_ADVANCE_RIP_AND_FINISH();
9843 IEM_MC_END();
9844 }
9845 else
9846 {
9847 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9849 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9850 IEM_MC_LOCAL(uint32_t, u32Dst);
9851 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9852 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9853 IEM_MC_ADVANCE_RIP_AND_FINISH();
9854 IEM_MC_END();
9855 }
9856}
9857
9858
9859/** Opcode 0xf3 0x0f 0xae 11b/1. */
9860FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9861{
9862 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9863 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9864 {
9865 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9867 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9868 IEM_MC_LOCAL(uint64_t, u64Dst);
9869 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9870 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9871 IEM_MC_ADVANCE_RIP_AND_FINISH();
9872 IEM_MC_END();
9873 }
9874 else
9875 {
9876 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9878 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9879 IEM_MC_LOCAL(uint32_t, u32Dst);
9880 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9881 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9882 IEM_MC_ADVANCE_RIP_AND_FINISH();
9883 IEM_MC_END();
9884 }
9885}
9886
9887
9888/** Opcode 0xf3 0x0f 0xae 11b/2. */
9889FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9890{
9891 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9892 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9893 {
9894 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9896 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9897 IEM_MC_LOCAL(uint64_t, u64Dst);
9898 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9899 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9900 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9901 IEM_MC_ADVANCE_RIP_AND_FINISH();
9902 IEM_MC_END();
9903 }
9904 else
9905 {
9906 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9908 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9909 IEM_MC_LOCAL(uint32_t, u32Dst);
9910 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9911 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9912 IEM_MC_ADVANCE_RIP_AND_FINISH();
9913 IEM_MC_END();
9914 }
9915}
9916
9917
9918/** Opcode 0xf3 0x0f 0xae 11b/3. */
9919FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9920{
9921 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9922 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9923 {
9924 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9926 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9927 IEM_MC_LOCAL(uint64_t, u64Dst);
9928 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9929 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9930 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9931 IEM_MC_ADVANCE_RIP_AND_FINISH();
9932 IEM_MC_END();
9933 }
9934 else
9935 {
9936 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9938 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9939 IEM_MC_LOCAL(uint32_t, u32Dst);
9940 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9941 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9942 IEM_MC_ADVANCE_RIP_AND_FINISH();
9943 IEM_MC_END();
9944 }
9945}
9946
9947
9948/**
9949 * Group 15 jump table for register variant.
9950 */
9951IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9952{ /* pfx: none, 066h, 0f3h, 0f2h */
9953 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9954 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9955 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9956 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9957 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9958 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9959 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9960 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9961};
9962AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9963
9964
9965/**
9966 * Group 15 jump table for memory variant.
9967 */
9968IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9969{ /* pfx: none, 066h, 0f3h, 0f2h */
9970 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9971 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9972 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9973 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9974 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9975 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9976 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9977 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9978};
9979AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9980
9981
9982/** Opcode 0x0f 0xae. */
9983FNIEMOP_DEF(iemOp_Grp15)
9984{
9985 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
9986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
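    /* Table index = /r * 4 + last prefix (idxPrefix: 0 = none, 1 = 0x66,
       2 = 0xf3, 3 = 0xf2), matching the four columns above. */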
9987 if (IEM_IS_MODRM_REG_MODE(bRm))
9988 /* register, register */
9989 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9990 + pVCpu->iem.s.idxPrefix], bRm);
9991 /* memory, register */
9992 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9993 + pVCpu->iem.s.idxPrefix], bRm);
9994}
9995
9996
9997/**
9998 * @opcode 0xaf
9999 * @opflclass multiply
10000 */
10001FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10002{
10003 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10004 IEMOP_HLP_MIN_386();
10005 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10006 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10008 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10009}
10010
10011
10012/**
10013 * @opcode 0xb0
10014 * @opflclass arithmetic
10015 */
10016FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10017{
10018 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10019 IEMOP_HLP_MIN_486();
10020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10021
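    /*
     * CMPXCHG: compare AL with the destination; if equal, ZF is set and the
     * source is stored in the destination, otherwise ZF is cleared and the
     * destination is loaded into AL.
     */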
10022 if (IEM_IS_MODRM_REG_MODE(bRm))
10023 {
10024 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10026 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10027 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10028 IEM_MC_ARG(uint8_t, u8Src, 2);
10029 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10030
10031 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10032 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10033 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10034 IEM_MC_REF_EFLAGS(pEFlags);
10035 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10036
10037 IEM_MC_ADVANCE_RIP_AND_FINISH();
10038 IEM_MC_END();
10039 }
10040 else
10041 {
10042#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10043 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10046 IEMOP_HLP_DONE_DECODING(); \
10047 \
10048 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10049 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10050 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10051 \
10052 IEM_MC_ARG(uint8_t, u8Src, 2); \
10053 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10054 \
10055 IEM_MC_LOCAL(uint8_t, u8Al); \
10056 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10057 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10058 \
10059 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10060 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10061 \
10062 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10063 IEM_MC_COMMIT_EFLAGS(EFlags); \
10064 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10065 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10066 IEM_MC_END()
10067
10068 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10069 {
10070 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10071 }
10072 else
10073 {
10074 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10075 }
10076 }
10077}
10078
10079/**
10080 * @opcode 0xb1
10081 * @opflclass arithmetic
10082 */
10083FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10084{
10085 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10086 IEMOP_HLP_MIN_486();
10087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10088
10089 if (IEM_IS_MODRM_REG_MODE(bRm))
10090 {
10091 switch (pVCpu->iem.s.enmEffOpSize)
10092 {
10093 case IEMMODE_16BIT:
10094 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10096 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10097 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10098 IEM_MC_ARG(uint16_t, u16Src, 2);
10099 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10100
10101 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10102 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10103 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10104 IEM_MC_REF_EFLAGS(pEFlags);
10105 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10106
10107 IEM_MC_ADVANCE_RIP_AND_FINISH();
10108 IEM_MC_END();
10109 break;
10110
10111 case IEMMODE_32BIT:
10112 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10114 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10115 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10116 IEM_MC_ARG(uint32_t, u32Src, 2);
10117 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10118
10119 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10120 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10121 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10122 IEM_MC_REF_EFLAGS(pEFlags);
10123 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10124
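                /* A 32-bit write zeroes bits 63:32 of the full register: on
                   success (ZF=1) the destination was written, on failure EAX. */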
10125 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10126 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10127 } IEM_MC_ELSE() {
10128 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10129 } IEM_MC_ENDIF();
10130
10131 IEM_MC_ADVANCE_RIP_AND_FINISH();
10132 IEM_MC_END();
10133 break;
10134
10135 case IEMMODE_64BIT:
10136 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10138 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10139 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10140 IEM_MC_ARG(uint64_t, u64Src, 2);
10141 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10142
10143 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10144 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10145 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10146 IEM_MC_REF_EFLAGS(pEFlags);
10147 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10148
10149 IEM_MC_ADVANCE_RIP_AND_FINISH();
10150 IEM_MC_END();
10151 break;
10152
10153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10154 }
10155 }
10156 else
10157 {
10158#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10159 do { \
10160 switch (pVCpu->iem.s.enmEffOpSize) \
10161 { \
10162 case IEMMODE_16BIT: \
10163 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10164 \
10165 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10168 IEMOP_HLP_DONE_DECODING(); \
10169 \
10170 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10171 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10172 \
10173 IEM_MC_ARG(uint16_t, u16Src, 2); \
10174 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10175 \
10176 IEM_MC_LOCAL(uint16_t, u16Ax); \
10177 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10178 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10179 \
10180 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10181 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10182 \
10183 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10184 IEM_MC_COMMIT_EFLAGS(EFlags); \
10185 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10186 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10187 IEM_MC_END(); \
10188 break; \
10189 \
10190 case IEMMODE_32BIT: \
10191 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10194 IEMOP_HLP_DONE_DECODING(); \
10195 \
10196 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10197 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10198 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10199 \
10200 IEM_MC_ARG(uint32_t, u32Src, 2); \
10201 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10202 \
10203 IEM_MC_LOCAL(uint32_t, u32Eax); \
10204 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10205 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10206 \
10207 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10208 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10209 \
10210 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10211 IEM_MC_COMMIT_EFLAGS(EFlags); \
10212 \
10213 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10214 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10215 } IEM_MC_ENDIF(); \
10216 \
10217 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10218 IEM_MC_END(); \
10219 break; \
10220 \
10221 case IEMMODE_64BIT: \
10222 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10225 IEMOP_HLP_DONE_DECODING(); \
10226 \
10227 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10228 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10229 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10230 \
10231 IEM_MC_ARG(uint64_t, u64Src, 2); \
10232 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10233 \
10234 IEM_MC_LOCAL(uint64_t, u64Rax); \
10235 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10236 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10237 \
10238 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10239 \
10240 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10241 \
10242 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10243 IEM_MC_COMMIT_EFLAGS(EFlags); \
10244 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10245 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10246 IEM_MC_END(); \
10247 break; \
10248 \
10249 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10250 } \
10251 } while (0)
10252
10253 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10254 {
10255 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10256 }
10257 else
10258 {
10259 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10260 }
10261 }
10262}
10263
10264
10265/** Opcode 0x0f 0xb2. */
10266FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10267{
10268 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10269 IEMOP_HLP_MIN_386();
10270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
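    /* Mp is a far pointer in memory; the offset goes into Gv and the selector
       into SS.  The register form is undefined and raises #UD. */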
10271 if (IEM_IS_MODRM_REG_MODE(bRm))
10272 IEMOP_RAISE_INVALID_OPCODE_RET();
10273 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10274}
10275
10276
10277/**
10278 * @opcode 0xb3
10279 * @oppfx n/a
10280 * @opflclass bitmap
10281 */
10282FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10283{
10284 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10285 IEMOP_HLP_MIN_386();
10286 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10287 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10288}
10289
10290
10291/** Opcode 0x0f 0xb4. */
10292FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10293{
10294 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10295 IEMOP_HLP_MIN_386();
10296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10297 if (IEM_IS_MODRM_REG_MODE(bRm))
10298 IEMOP_RAISE_INVALID_OPCODE_RET();
10299 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10300}
10301
10302
10303/** Opcode 0x0f 0xb5. */
10304FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10305{
10306 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10307 IEMOP_HLP_MIN_386();
10308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10309 if (IEM_IS_MODRM_REG_MODE(bRm))
10310 IEMOP_RAISE_INVALID_OPCODE_RET();
10311 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10312}
10313
10314
10315/** Opcode 0x0f 0xb6. */
10316FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10317{
10318 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10319 IEMOP_HLP_MIN_386();
10320
10321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10322
10323 /*
10324 * If rm is denoting a register, no more instruction bytes.
10325 */
10326 if (IEM_IS_MODRM_REG_MODE(bRm))
10327 {
10328 switch (pVCpu->iem.s.enmEffOpSize)
10329 {
10330 case IEMMODE_16BIT:
10331 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10333 IEM_MC_LOCAL(uint16_t, u16Value);
10334 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10335 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10336 IEM_MC_ADVANCE_RIP_AND_FINISH();
10337 IEM_MC_END();
10338 break;
10339
10340 case IEMMODE_32BIT:
10341 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10343 IEM_MC_LOCAL(uint32_t, u32Value);
10344 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10345 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10346 IEM_MC_ADVANCE_RIP_AND_FINISH();
10347 IEM_MC_END();
10348 break;
10349
10350 case IEMMODE_64BIT:
10351 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10353 IEM_MC_LOCAL(uint64_t, u64Value);
10354 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10355 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10356 IEM_MC_ADVANCE_RIP_AND_FINISH();
10357 IEM_MC_END();
10358 break;
10359
10360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10361 }
10362 }
10363 else
10364 {
10365 /*
10366 * We're loading a register from memory.
10367 */
10368 switch (pVCpu->iem.s.enmEffOpSize)
10369 {
10370 case IEMMODE_16BIT:
10371 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10372 IEM_MC_LOCAL(uint16_t, u16Value);
10373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10376 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10377 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10378 IEM_MC_ADVANCE_RIP_AND_FINISH();
10379 IEM_MC_END();
10380 break;
10381
10382 case IEMMODE_32BIT:
10383 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10384 IEM_MC_LOCAL(uint32_t, u32Value);
10385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10388 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10389 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10390 IEM_MC_ADVANCE_RIP_AND_FINISH();
10391 IEM_MC_END();
10392 break;
10393
10394 case IEMMODE_64BIT:
10395 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10396 IEM_MC_LOCAL(uint64_t, u64Value);
10397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10400 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10401 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10402 IEM_MC_ADVANCE_RIP_AND_FINISH();
10403 IEM_MC_END();
10404 break;
10405
10406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10407 }
10408 }
10409}
10410
10411
10412/** Opcode 0x0f 0xb7. */
10413FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10414{
10415 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10416 IEMOP_HLP_MIN_386();
10417
10418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10419
10420 /** @todo Not entirely sure how the operand size prefix is handled here,
10421 * assuming that it will be ignored. Would be nice to have a few
10422 * tests for this. */
10423
10424 /** @todo There should be no difference in the behaviour whether REX.W is
10425 * present or not... */
10426
10427 /*
10428 * If rm is denoting a register, no more instruction bytes.
10429 */
10430 if (IEM_IS_MODRM_REG_MODE(bRm))
10431 {
10432 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10433 {
10434 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10436 IEM_MC_LOCAL(uint32_t, u32Value);
10437 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10438 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10439 IEM_MC_ADVANCE_RIP_AND_FINISH();
10440 IEM_MC_END();
10441 }
10442 else
10443 {
10444 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10446 IEM_MC_LOCAL(uint64_t, u64Value);
10447 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10448 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10449 IEM_MC_ADVANCE_RIP_AND_FINISH();
10450 IEM_MC_END();
10451 }
10452 }
10453 else
10454 {
10455 /*
10456 * We're loading a register from memory.
10457 */
10458 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10459 {
10460 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10461 IEM_MC_LOCAL(uint32_t, u32Value);
10462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10465 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10466 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10467 IEM_MC_ADVANCE_RIP_AND_FINISH();
10468 IEM_MC_END();
10469 }
10470 else
10471 {
10472 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10473 IEM_MC_LOCAL(uint64_t, u64Value);
10474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10477 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10478 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10479 IEM_MC_ADVANCE_RIP_AND_FINISH();
10480 IEM_MC_END();
10481 }
10482 }
10483}
10484
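/* A minimal sketch (not part of the build) of the zero extension the two
   movzx forms above implement: the fetched byte/word is widened with zero
   fill, and the 32-bit destination cases implicitly clear the high half of
   the 64-bit register as well.  The function name is made up. */
#if 0
static uint64_t movzxU16ToU64Sketch(uint16_t u16Src)
{
    return (uint64_t)u16Src; /* bits 16 thru 63 become zero */
}
#endif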
10485
10486/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10487FNIEMOP_UD_STUB(iemOp_jmpe);
10488
10489
10490/**
10491 * @opcode 0xb8
10492 * @oppfx 0xf3
10493 * @opflmodify cf,pf,af,zf,sf,of
10494 * @opflclear cf,pf,af,sf,of
10495 */
10496FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10497{
10498 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10499 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10500 return iemOp_InvalidNeedRM(pVCpu);
10501#ifndef TST_IEM_CHECK_MC
10502# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10503 static const IEMOPBINSIZES s_Native =
10504 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10505# endif
10506 static const IEMOPBINSIZES s_Fallback =
10507 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10508#endif
10509 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10511 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10512}
10513
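/* A minimal sketch (not part of the build) of what a fallback like
   iemAImpl_popcnt_u32_fallback computes, assuming the flag behaviour in the
   doxygen block above: all status flags are cleared except ZF, which is set
   for a zero source.  The by-value eflags signature mirrors the convention
   of the newer assembly helpers but is otherwise an assumption. */
#if 0
static uint32_t popcntU32Sketch(uint32_t fEFlagsIn, uint32_t *puDst, uint32_t uSrc)
{
    uint32_t cBits = 0;
    for (uint32_t uTmp = uSrc; uTmp != 0; uTmp &= uTmp - 1) /* clears the lowest set bit */
        cBits++;
    *puDst = cBits;
    uint32_t fEFlags = fEFlagsIn & ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF
                                               | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (!uSrc)
        fEFlags |= X86_EFL_ZF;
    return fEFlags;
}
#endif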
10514
10515/**
10516 * @opcode 0xb9
10517 * @opinvalid intel-modrm
10518 * @optest ->
10519 */
10520FNIEMOP_DEF(iemOp_Grp10)
10521{
10522 /*
10523 * AMD does not decode beyond the 0xb9 opcode byte, whereas intel decodes the
10524 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10525 */
10526 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10527 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10528 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10529}
10530
10531
10532/**
10533 * Body for group 8 bit instruction.
10534 */
10535#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10536 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10537 \
10538 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10539 { \
10540 /* register destination. */ \
10541 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10542 \
10543 switch (pVCpu->iem.s.enmEffOpSize) \
10544 { \
10545 case IEMMODE_16BIT: \
10546 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10548 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10549 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10550 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10551 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10552 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10553 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10554 \
10555 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10556 IEM_MC_END(); \
10557 break; \
10558 \
10559 case IEMMODE_32BIT: \
10560 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10562 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10563 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10564 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10565 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10566 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10567 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10568 \
10569 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10570 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10571 IEM_MC_END(); \
10572 break; \
10573 \
10574 case IEMMODE_64BIT: \
10575 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10577 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10578 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10579 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10580 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10581 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10582 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10583 \
10584 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10585 IEM_MC_END(); \
10586 break; \
10587 \
10588 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10589 } \
10590 } \
10591 else \
10592 { \
10593 /* memory destination. */ \
10594 /** @todo test negative bit offsets! */ \
10595 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10596 { \
10597 switch (pVCpu->iem.s.enmEffOpSize) \
10598 { \
10599 case IEMMODE_16BIT: \
10600 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10603 \
10604 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10605 IEMOP_HLP_DONE_DECODING(); \
10606 \
10607 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10608 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10609 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10610 \
10611 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10612 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10613 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10614 \
10615 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10616 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10617 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10618 IEM_MC_END(); \
10619 break; \
10620 \
10621 case IEMMODE_32BIT: \
10622 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10625 \
10626 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10627 IEMOP_HLP_DONE_DECODING(); \
10628 \
10629 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10630 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10631 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10632 \
10633 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10634 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10635 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10636 \
10637 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10638 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10639 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10640 IEM_MC_END(); \
10641 break; \
10642 \
10643 case IEMMODE_64BIT: \
10644 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10647 \
10648 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10649 IEMOP_HLP_DONE_DECODING(); \
10650 \
10651 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10652 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10653 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10654 \
10655 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10656 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10657 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10658 \
10659 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10660 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10661 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10662 IEM_MC_END(); \
10663 break; \
10664 \
10665 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10666 } \
10667 } \
10668 else \
10669 { \
10670 (void)0
10671/* Separate macro to work around a parsing issue in IEMAllInstPython.py */
10672#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10673 switch (pVCpu->iem.s.enmEffOpSize) \
10674 { \
10675 case IEMMODE_16BIT: \
10676 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10679 \
10680 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10681 IEMOP_HLP_DONE_DECODING(); \
10682 \
10683 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10684 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10685 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10686 \
10687 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10688 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10689 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10690 \
10691 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10692 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10693 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10694 IEM_MC_END(); \
10695 break; \
10696 \
10697 case IEMMODE_32BIT: \
10698 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10701 \
10702 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10703 IEMOP_HLP_DONE_DECODING(); \
10704 \
10705 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10706 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10707 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10708 \
10709 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10710 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10711 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10712 \
10713 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10714 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10715 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10716 IEM_MC_END(); \
10717 break; \
10718 \
10719 case IEMMODE_64BIT: \
10720 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10723 \
10724 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10725 IEMOP_HLP_DONE_DECODING(); \
10726 \
10727 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10728 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10729 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10730 \
10731 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10732 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10733 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10734 \
10735 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10736 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10737 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10738 IEM_MC_END(); \
10739 break; \
10740 \
10741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10742 } \
10743 } \
10744 } \
10745 (void)0
10746
10747/* Read-only version (bt) */
10748#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10749 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10750 \
10751 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10752 { \
10753 /* register destination. */ \
10754 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10755 \
10756 switch (pVCpu->iem.s.enmEffOpSize) \
10757 { \
10758 case IEMMODE_16BIT: \
10759 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10761 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10762 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10763 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10764 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10765 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10766 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10767 \
10768 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10769 IEM_MC_END(); \
10770 break; \
10771 \
10772 case IEMMODE_32BIT: \
10773 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10775 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10776 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10777 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10778 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10779 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10780 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10781 \
10782 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10783 IEM_MC_END(); \
10784 break; \
10785 \
10786 case IEMMODE_64BIT: \
10787 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10789 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10790 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10791 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10792 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10793 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10794 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10795 \
10796 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10797 IEM_MC_END(); \
10798 break; \
10799 \
10800 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10801 } \
10802 } \
10803 else \
10804 { \
10805 /* memory destination. */ \
10806 /** @todo test negative bit offsets! */ \
10807 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10808 { \
10809 switch (pVCpu->iem.s.enmEffOpSize) \
10810 { \
10811 case IEMMODE_16BIT: \
10812 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10815 \
10816 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10817 IEMOP_HLP_DONE_DECODING(); \
10818 \
10819 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10820 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10821 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10822 \
10823 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10824 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10825 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10826 \
10827 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10828 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10829 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10830 IEM_MC_END(); \
10831 break; \
10832 \
10833 case IEMMODE_32BIT: \
10834 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10837 \
10838 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10839 IEMOP_HLP_DONE_DECODING(); \
10840 \
10841 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10842 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10843 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10844 \
10845 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10846 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10847 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10848 \
10849 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10850 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10851 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10852 IEM_MC_END(); \
10853 break; \
10854 \
10855 case IEMMODE_64BIT: \
10856 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10859 \
10860 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10861 IEMOP_HLP_DONE_DECODING(); \
10862 \
10863 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10864 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10865 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10866 \
10867 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10868 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10869 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10870 \
10871 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10872 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10873 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10874 IEM_MC_END(); \
10875 break; \
10876 \
10877 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10878 } \
10879 } \
10880 else \
10881 { \
10882 IEMOP_HLP_DONE_DECODING(); \
10883 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
10884 } \
10885 } \
10886 (void)0
10887
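/* A minimal sketch (not part of the build) of the core operation behind the
   three Ev,Ib bodies above.  The immediate bit offset wraps within the
   operand, which is what the "bImm & 0x0f/0x1f/0x3f" masking implements;
   CF receives the addressed bit and bts/btr/btc then set/clear/toggle it,
   while plain bt leaves the operand untouched (hence the read-only body).
   The helper name is made up; the by-value eflags signature follows the
   IEM_MC_CALL_AIMPL_3 pattern above. */
#if 0
static uint32_t btcU32Sketch(uint32_t fEFlagsIn, uint32_t *puDst, uint32_t uBitOfs)
{
    uint32_t const fBit    = UINT32_C(1) << (uBitOfs & 31);
    uint32_t       fEFlags = fEFlagsIn & ~(uint32_t)X86_EFL_CF;
    if (*puDst & fBit)
        fEFlags |= X86_EFL_CF;  /* CF = old value of the addressed bit */
    *puDst ^= fBit;             /* btc complements; bts would OR, btr would AND-NOT */
    return fEFlags;
}
#endif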
10888
10889/**
10890 * @opmaps grp8
10891 * @opcode /4
10892 * @oppfx n/a
10893 * @opflclass bitmap
10894 */
10895FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10896{
10897 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10898 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
10899}
10900
10901
10902/**
10903 * @opmaps grp8
10904 * @opcode /5
10905 * @oppfx n/a
10906 * @opflclass bitmap
10907 */
10908FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
10909{
10910 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
10911 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
10912 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
10913}
10914
10915
10916/**
10917 * @opmaps grp8
10918 * @opcode /6
10919 * @oppfx n/a
10920 * @opflclass bitmap
10921 */
10922FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
10923{
10924 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
10925 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10926 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10927}
10928
10929
10930/**
10931 * @opmaps grp8
10932 * @opcode /7
10933 * @oppfx n/a
10934 * @opflclass bitmap
10935 */
10936FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
10937{
10938 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
10939 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
10940 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10941}
10942
10943
10944/** Opcode 0x0f 0xba. */
10945FNIEMOP_DEF(iemOp_Grp8)
10946{
10947 IEMOP_HLP_MIN_386();
10948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10949 switch (IEM_GET_MODRM_REG_8(bRm))
10950 {
10951 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
10952 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
10953 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
10954 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
10955
10956 case 0: case 1: case 2: case 3:
10957 /* Both AMD and Intel want full modr/m decoding and imm8. */
10958 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10959
10960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10961 }
10962}
10963
10964
10965/**
10966 * @opcode 0xbb
10967 * @oppfx n/a
10968 * @opflclass bitmap
10969 */
10970FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10971{
10972 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10973 IEMOP_HLP_MIN_386();
10974 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
10975 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10976}
10977
10978
10979/**
10980 * Body for BSF and BSR instructions.
10981 *
10982 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10983 * the destination register, which means that for 32-bit operations the high
10984 * bits must be left alone.
10985 *
10986 * @param pImpl Pointer to the instruction implementation (assembly).
10987 */
10988#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
10989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
10990 \
10991 /* \
10992 * If rm is denoting a register, no more instruction bytes. \
10993 */ \
10994 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10995 { \
10996 switch (pVCpu->iem.s.enmEffOpSize) \
10997 { \
10998 case IEMMODE_16BIT: \
10999 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11001 \
11002 IEM_MC_ARG(uint16_t, u16Src, 2); \
11003 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11004 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11005 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11006 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11007 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11008 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11009 \
11010 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11011 IEM_MC_END(); \
11012 break; \
11013 \
11014 case IEMMODE_32BIT: \
11015 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11017 \
11018 IEM_MC_ARG(uint32_t, u32Src, 2); \
11019 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11020 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11021 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11022 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11023 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11024 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11025 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11026 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11027 } IEM_MC_ENDIF(); \
11028 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11029 IEM_MC_END(); \
11030 break; \
11031 \
11032 case IEMMODE_64BIT: \
11033 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11035 \
11036 IEM_MC_ARG(uint64_t, u64Src, 2); \
11037 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11038 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11039 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11040 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11041 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11042 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11043 \
11044 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11045 IEM_MC_END(); \
11046 break; \
11047 \
11048 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11049 } \
11050 } \
11051 else \
11052 { \
11053 /* \
11054 * We're accessing memory. \
11055 */ \
11056 switch (pVCpu->iem.s.enmEffOpSize) \
11057 { \
11058 case IEMMODE_16BIT: \
11059 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11063 \
11064 IEM_MC_ARG(uint16_t, u16Src, 2); \
11065 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11066 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11067 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11068 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11069 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11070 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11071 \
11072 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11073 IEM_MC_END(); \
11074 break; \
11075 \
11076 case IEMMODE_32BIT: \
11077 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11081 \
11082 IEM_MC_ARG(uint32_t, u32Src, 2); \
11083 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11084 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11085 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11086 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11087 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11088 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11089 \
11090 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11091 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11092 } IEM_MC_ENDIF(); \
11093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11094 IEM_MC_END(); \
11095 break; \
11096 \
11097 case IEMMODE_64BIT: \
11098 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11102 \
11103 IEM_MC_ARG(uint64_t, u64Src, 2); \
11104 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11105 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11106 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11107 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11108 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11109 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11110 \
11111 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11112 IEM_MC_END(); \
11113 break; \
11114 \
11115 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11116 } \
11117 } (void)0
11118
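/* A minimal sketch (not part of the build) of the bit-scan semantics the
   macro above wraps.  For a zero source ZF is set and the destination is
   not written at all, which is why the 32-bit cases only clear the high
   half of the destination register when ZF ends up clear.  Undefined flags
   are simply left untouched here; the real workers are selected by CPU
   vendor behaviour.  The function name is made up. */
#if 0
static uint32_t bsfU32Sketch(uint32_t fEFlagsIn, uint32_t *puDst, uint32_t uSrc)
{
    uint32_t fEFlags = fEFlagsIn & ~(uint32_t)X86_EFL_ZF;
    if (uSrc)
    {
        uint32_t iBit = 0;
        while (!(uSrc & (UINT32_C(1) << iBit)))
            iBit++;               /* bsr would scan from bit 31 downwards */
        *puDst = iBit;            /* only written when a set bit was found */
    }
    else
        fEFlags |= X86_EFL_ZF;    /* zero source: ZF=1, destination unchanged */
    return fEFlags;
}
#endif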
11119
11120/**
11121 * @opcode 0xbc
11122 * @oppfx !0xf3
11123 * @opfltest cf,pf,af,sf,of
11124 * @opflmodify cf,pf,af,zf,sf,of
11125 * @opflundef cf,pf,af,sf,of
11126 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11127 * document them as inputs. Sigh.
11128 */
11129FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11130{
11131 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11132 IEMOP_HLP_MIN_386();
11133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11134 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11135 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11136}
11137
11138
11139/**
11140 * @opcode 0xbc
11141 * @oppfx 0xf3
11142 * @opfltest pf,af,sf,of
11143 * @opflmodify cf,pf,af,zf,sf,of
11144 * @opflundef pf,af,sf,of
11145 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11146 * document them as inputs. Sigh.
11147 */
11148FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11149{
11150 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11151 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11152 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11153
11154#ifndef TST_IEM_CHECK_MC
11155 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11156 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11157 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11158 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11159 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11160 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11161 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11162 {
11163 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11164 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11165 };
11166#endif
11167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11168 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11169 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11171 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11172}
11173
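/* A minimal sketch (not part of the build) of how TZCNT differs from the
   BSF it falls back to: a zero source yields the operand width and sets CF
   instead of leaving the destination alone, and ZF reflects a zero result
   rather than a zero source.  LZCNT (0x0f 0xbd below) mirrors this from
   the high end.  The function name is made up. */
#if 0
static uint32_t tzcntU32Sketch(uint32_t fEFlagsIn, uint32_t *puDst, uint32_t uSrc)
{
    uint32_t cZeros = 0;
    while (cZeros < 32 && !(uSrc & (UINT32_C(1) << cZeros)))
        cZeros++;                 /* reaches 32 for a zero source */
    *puDst = cZeros;              /* always written, unlike bsf */
    uint32_t fEFlags = fEFlagsIn & ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    if (uSrc == 0)
        fEFlags |= X86_EFL_CF;
    if (cZeros == 0)
        fEFlags |= X86_EFL_ZF;
    return fEFlags;
}
#endif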
11174
11175/**
11176 * @opcode 0xbd
11177 * @oppfx !0xf3
11178 * @opfltest cf,pf,af,sf,of
11179 * @opflmodify cf,pf,af,zf,sf,of
11180 * @opflundef cf,pf,af,sf,of
11181 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11182 * document them as inputs. Sigh.
11183 */
11184FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11185{
11186 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11187 IEMOP_HLP_MIN_386();
11188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11189 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11190 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11191}
11192
11193
11194/**
11195 * @opcode 0xbd
11196 * @oppfx 0xf3
11197 * @opfltest pf,af,sf,of
11198 * @opflmodify cf,pf,af,zf,sf,of
11199 * @opflundef pf,af,sf,of
11200 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11201 * document them as inputs. Sigh.
11202 */
11203FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11204{
11205 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11206 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11207 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11208
11209#ifndef TST_IEM_CHECK_MC
11210 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11211 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11212 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11213 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11214 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11215 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11216 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11217 {
11218 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11219 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11220 };
11221#endif
11222 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11223 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11224 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11226 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11227}
11228
11229
11230
11231/** Opcode 0x0f 0xbe. */
11232FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11233{
11234 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11235 IEMOP_HLP_MIN_386();
11236
11237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11238
11239 /*
11240 * If rm is denoting a register, no more instruction bytes.
11241 */
11242 if (IEM_IS_MODRM_REG_MODE(bRm))
11243 {
11244 switch (pVCpu->iem.s.enmEffOpSize)
11245 {
11246 case IEMMODE_16BIT:
11247 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11249 IEM_MC_LOCAL(uint16_t, u16Value);
11250 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11251 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11252 IEM_MC_ADVANCE_RIP_AND_FINISH();
11253 IEM_MC_END();
11254 break;
11255
11256 case IEMMODE_32BIT:
11257 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11259 IEM_MC_LOCAL(uint32_t, u32Value);
11260 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11261 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11262 IEM_MC_ADVANCE_RIP_AND_FINISH();
11263 IEM_MC_END();
11264 break;
11265
11266 case IEMMODE_64BIT:
11267 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11269 IEM_MC_LOCAL(uint64_t, u64Value);
11270 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11271 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11272 IEM_MC_ADVANCE_RIP_AND_FINISH();
11273 IEM_MC_END();
11274 break;
11275
11276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11277 }
11278 }
11279 else
11280 {
11281 /*
11282 * We're loading a register from memory.
11283 */
11284 switch (pVCpu->iem.s.enmEffOpSize)
11285 {
11286 case IEMMODE_16BIT:
11287 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11288 IEM_MC_LOCAL(uint16_t, u16Value);
11289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11292 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11293 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11294 IEM_MC_ADVANCE_RIP_AND_FINISH();
11295 IEM_MC_END();
11296 break;
11297
11298 case IEMMODE_32BIT:
11299 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11300 IEM_MC_LOCAL(uint32_t, u32Value);
11301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11304 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11305 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11306 IEM_MC_ADVANCE_RIP_AND_FINISH();
11307 IEM_MC_END();
11308 break;
11309
11310 case IEMMODE_64BIT:
11311 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11312 IEM_MC_LOCAL(uint64_t, u64Value);
11313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11316 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11317 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11318 IEM_MC_ADVANCE_RIP_AND_FINISH();
11319 IEM_MC_END();
11320 break;
11321
11322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11323 }
11324 }
11325}
11326
11327
11328/** Opcode 0x0f 0xbf. */
11329FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11330{
11331 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11332 IEMOP_HLP_MIN_386();
11333
11334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11335
11336 /** @todo Not entirely sure how the operand size prefix is handled here,
11337 * assuming that it will be ignored. Would be nice to have a few
11338 * tests for this. */
11339 /*
11340 * If rm is denoting a register, no more instruction bytes.
11341 */
11342 if (IEM_IS_MODRM_REG_MODE(bRm))
11343 {
11344 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11345 {
11346 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11348 IEM_MC_LOCAL(uint32_t, u32Value);
11349 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11350 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11351 IEM_MC_ADVANCE_RIP_AND_FINISH();
11352 IEM_MC_END();
11353 }
11354 else
11355 {
11356 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11358 IEM_MC_LOCAL(uint64_t, u64Value);
11359 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11360 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11361 IEM_MC_ADVANCE_RIP_AND_FINISH();
11362 IEM_MC_END();
11363 }
11364 }
11365 else
11366 {
11367 /*
11368 * We're loading a register from memory.
11369 */
11370 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11371 {
11372 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11373 IEM_MC_LOCAL(uint32_t, u32Value);
11374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11377 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11378 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11379 IEM_MC_ADVANCE_RIP_AND_FINISH();
11380 IEM_MC_END();
11381 }
11382 else
11383 {
11384 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11385 IEM_MC_LOCAL(uint64_t, u64Value);
11386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11389 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11390 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11391 IEM_MC_ADVANCE_RIP_AND_FINISH();
11392 IEM_MC_END();
11393 }
11394 }
11395}
11396
11397
11398/**
11399 * @opcode 0xc0
11400 * @opflclass arithmetic
11401 */
11402FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11403{
11404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11405 IEMOP_HLP_MIN_486();
11406 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11407
11408 /*
11409 * If rm is denoting a register, no more instruction bytes.
11410 */
11411 if (IEM_IS_MODRM_REG_MODE(bRm))
11412 {
11413 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11415 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11416 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11418
11419 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11420 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11421 IEM_MC_REF_EFLAGS(pEFlags);
11422 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11423
11424 IEM_MC_ADVANCE_RIP_AND_FINISH();
11425 IEM_MC_END();
11426 }
11427 else
11428 {
11429 /*
11430 * We're accessing memory.
11431 */
11432#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11433 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11436 IEMOP_HLP_DONE_DECODING(); \
11437 \
11438 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11439 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11440 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11441 \
11442 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11443 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11444 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11445 \
11446 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11447 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11448 \
11449 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11450 IEM_MC_COMMIT_EFLAGS(EFlags); \
11451 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11452 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11453 IEM_MC_END()
11454 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11455 {
11456 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11457 }
11458 else
11459 {
11460 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11461 }
11462 }
11463}
11464
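/* A minimal sketch (not part of the build) of the exchange-and-add done by
   the xadd workers: the destination receives dst+src and the source
   register receives the old destination value, which is why the memory
   form above works on a register copy and stores it back only after the
   memory commit.  EFLAGS are set as for ADD; the flag computation is
   omitted here and the function name is made up.  The Ev,Gv form below is
   the same at 16/32/64 bits. */
#if 0
static void xaddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = (uint8_t)(u8OldDst + *pu8Reg); /* dst += reg */
    *pu8Reg = u8OldDst;                      /* reg  = old dst */
}
#endif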
11465
11466/**
11467 * @opcode 0xc1
11468 * @opflclass arithmetic
11469 */
11470FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11471{
11472 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11473 IEMOP_HLP_MIN_486();
11474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11475
11476 /*
11477 * If rm is denoting a register, no more instruction bytes.
11478 */
11479 if (IEM_IS_MODRM_REG_MODE(bRm))
11480 {
11481 switch (pVCpu->iem.s.enmEffOpSize)
11482 {
11483 case IEMMODE_16BIT:
11484 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11487 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11488 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11489
11490 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11491 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11492 IEM_MC_REF_EFLAGS(pEFlags);
11493 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11494
11495 IEM_MC_ADVANCE_RIP_AND_FINISH();
11496 IEM_MC_END();
11497 break;
11498
11499 case IEMMODE_32BIT:
11500 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11502 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11503 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11504 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11505
11506 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11507 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11508 IEM_MC_REF_EFLAGS(pEFlags);
11509 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11510
11511 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11512 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11513 IEM_MC_ADVANCE_RIP_AND_FINISH();
11514 IEM_MC_END();
11515 break;
11516
11517 case IEMMODE_64BIT:
11518 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11520 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11521 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11522 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11523
11524 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11525 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11526 IEM_MC_REF_EFLAGS(pEFlags);
11527 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11528
11529 IEM_MC_ADVANCE_RIP_AND_FINISH();
11530 IEM_MC_END();
11531 break;
11532
11533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11534 }
11535 }
11536 else
11537 {
11538 /*
11539 * We're accessing memory.
11540 */
11541#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11542 do { \
11543 switch (pVCpu->iem.s.enmEffOpSize) \
11544 { \
11545 case IEMMODE_16BIT: \
11546 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11549 IEMOP_HLP_DONE_DECODING(); \
11550 \
11551 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11552 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11553 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11554 \
11555 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11556 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11557 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11558 \
11559 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11560 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11561 \
11562 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11563 IEM_MC_COMMIT_EFLAGS(EFlags); \
11564 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11565 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11566 IEM_MC_END(); \
11567 break; \
11568 \
11569 case IEMMODE_32BIT: \
11570 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11573 IEMOP_HLP_DONE_DECODING(); \
11574 \
11575 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11576 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11577 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11578 \
11579 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11580 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11581 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11582 \
11583 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11584 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11585 \
11586 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11587 IEM_MC_COMMIT_EFLAGS(EFlags); \
11588 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11589 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11590 IEM_MC_END(); \
11591 break; \
11592 \
11593 case IEMMODE_64BIT: \
11594 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11597 IEMOP_HLP_DONE_DECODING(); \
11598 \
11599 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11600 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11601 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11602 \
11603 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11604 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11605 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11606 \
11607 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11608 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11609 \
11610 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11611 IEM_MC_COMMIT_EFLAGS(EFlags); \
11612 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11614 IEM_MC_END(); \
11615 break; \
11616 \
11617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11618 } \
11619 } while (0)
11620
11621 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11622 {
11623 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11624 }
11625 else
11626 {
11627 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11628 }
11629 }
11630}
11631
11632
11633/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11634FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11635{
11636 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11637
11638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11639 if (IEM_IS_MODRM_REG_MODE(bRm))
11640 {
11641 /*
11642 * XMM, XMM.
11643 */
11644 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11645 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11647 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11648 IEM_MC_LOCAL(X86XMMREG, Dst);
11649 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11650 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11651 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11652 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11653 IEM_MC_PREPARE_SSE_USAGE();
11654 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11655 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11656 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11657 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11658
11659 IEM_MC_ADVANCE_RIP_AND_FINISH();
11660 IEM_MC_END();
11661 }
11662 else
11663 {
11664 /*
11665 * XMM, [mem128].
11666 */
11667 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11668 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11669 IEM_MC_LOCAL(X86XMMREG, Dst);
11670 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11671 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11673
11674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11675 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11676 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11678 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11679 IEM_MC_PREPARE_SSE_USAGE();
11680
11681 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11682 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11683 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11684 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11685
11686 IEM_MC_ADVANCE_RIP_AND_FINISH();
11687 IEM_MC_END();
11688 }
11689}
11690
11691
11692/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11693FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11694{
11695 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11696
11697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11698 if (IEM_IS_MODRM_REG_MODE(bRm))
11699 {
11700 /*
11701 * XMM, XMM.
11702 */
11703 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11704 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11706 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11707 IEM_MC_LOCAL(X86XMMREG, Dst);
11708 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11709 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11710 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11711 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11712 IEM_MC_PREPARE_SSE_USAGE();
11713 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11714 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11715 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11716 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11717
11718 IEM_MC_ADVANCE_RIP_AND_FINISH();
11719 IEM_MC_END();
11720 }
11721 else
11722 {
11723 /*
11724 * XMM, [mem128].
11725 */
11726 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11727 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11728 IEM_MC_LOCAL(X86XMMREG, Dst);
11729 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11730 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11732
11733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11734 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11735 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11737 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11738 IEM_MC_PREPARE_SSE_USAGE();
11739
11740 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11741 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11742 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11743 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11744
11745 IEM_MC_ADVANCE_RIP_AND_FINISH();
11746 IEM_MC_END();
11747 }
11748}
11749
11750
11751/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11752FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11753{
11754 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11755
11756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11757 if (IEM_IS_MODRM_REG_MODE(bRm))
11758 {
11759 /*
11760 * XMM32, XMM32.
11761 */
11762 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11763 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11765 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11766 IEM_MC_LOCAL(X86XMMREG, Dst);
11767 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11768 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11769 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11770 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11771 IEM_MC_PREPARE_SSE_USAGE();
11772 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11773 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11774 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11775 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11776
11777 IEM_MC_ADVANCE_RIP_AND_FINISH();
11778 IEM_MC_END();
11779 }
11780 else
11781 {
11782 /*
11783 * XMM32, [mem32].
11784 */
11785 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11786 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11787 IEM_MC_LOCAL(X86XMMREG, Dst);
11788 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11789 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11791
11792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11793 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11794 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11796 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11797 IEM_MC_PREPARE_SSE_USAGE();
11798
11799 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11800 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11801 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11802 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11803 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11804
11805 IEM_MC_ADVANCE_RIP_AND_FINISH();
11806 IEM_MC_END();
11807 }
11808}
11809
11810
11811/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11812FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11813{
11814 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11815
11816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11817 if (IEM_IS_MODRM_REG_MODE(bRm))
11818 {
11819 /*
11820 * XMM64, XMM64.
11821 */
11822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11823 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11825 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11826 IEM_MC_LOCAL(X86XMMREG, Dst);
11827 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11828 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11829 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11830 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11831 IEM_MC_PREPARE_SSE_USAGE();
11832 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11833 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11834 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11835 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11836
11837 IEM_MC_ADVANCE_RIP_AND_FINISH();
11838 IEM_MC_END();
11839 }
11840 else
11841 {
11842 /*
11843 * XMM64, [mem64].
11844 */
11845 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11846 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11847 IEM_MC_LOCAL(X86XMMREG, Dst);
11848 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11849 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11851
11852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11853 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11854 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11856 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11857 IEM_MC_PREPARE_SSE_USAGE();
11858
11859 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11860 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11861 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11862 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11863 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11864
11865 IEM_MC_ADVANCE_RIP_AND_FINISH();
11866 IEM_MC_END();
11867 }
11868}
11869
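/* A minimal sketch (not part of the build) of the comparison predicate
   encoded in the imm8 of the four cmpps/cmppd/cmpss/cmpsd forms above.
   Each source element is compared against the destination element and the
   destination lane is replaced by an all-ones or all-zero mask; NaN inputs
   (unordered) are what distinguish e.g. LT from NLE.  Only the eight
   legacy (non-AVX) predicates apply here; the helper name is made up. */
#if 0
#include <math.h>
static uint32_t cmppsLaneSketch(float r32Dst, float r32Src, uint8_t bImm)
{
    int fResult;
    switch (bImm & 7)
    {
        case 0:  fResult = r32Dst == r32Src; break;                  /* EQ (false if unordered) */
        case 1:  fResult = r32Dst <  r32Src; break;                  /* LT */
        case 2:  fResult = r32Dst <= r32Src; break;                  /* LE */
        case 3:  fResult = isnan(r32Dst) || isnan(r32Src); break;    /* UNORD */
        case 4:  fResult = !(r32Dst == r32Src); break;               /* NEQ (true if unordered) */
        case 5:  fResult = !(r32Dst <  r32Src); break;               /* NLT */
        case 6:  fResult = !(r32Dst <= r32Src); break;               /* NLE */
        default: fResult = !isnan(r32Dst) && !isnan(r32Src); break;  /* ORD */
    }
    return fResult ? UINT32_MAX : UINT32_C(0);
}
#endif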
11870
11871/** Opcode 0x0f 0xc3. */
11872FNIEMOP_DEF(iemOp_movnti_My_Gy)
11873{
11874 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11875
11876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11877
11878 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11879 if (IEM_IS_MODRM_MEM_MODE(bRm))
11880 {
11881 switch (pVCpu->iem.s.enmEffOpSize)
11882 {
11883 case IEMMODE_32BIT:
11884 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11885 IEM_MC_LOCAL(uint32_t, u32Value);
11886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11887
11888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11890
11891 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11892 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11893 IEM_MC_ADVANCE_RIP_AND_FINISH();
11894 IEM_MC_END();
11895 break;
11896
11897 case IEMMODE_64BIT:
11898 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11899 IEM_MC_LOCAL(uint64_t, u64Value);
11900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11901
11902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11904
11905 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11906 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11907 IEM_MC_ADVANCE_RIP_AND_FINISH();
11908 IEM_MC_END();
11909 break;
11910
11911 case IEMMODE_16BIT:
11912 /** @todo check this form. */
11913 IEMOP_RAISE_INVALID_OPCODE_RET();
11914
11915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11916 }
11917 }
11918 else
11919 IEMOP_RAISE_INVALID_OPCODE_RET();
11920}
11921
11922
11923/* Opcode 0x66 0x0f 0xc3 - invalid */
11924/* Opcode 0xf3 0x0f 0xc3 - invalid */
11925/* Opcode 0xf2 0x0f 0xc3 - invalid */
11926
11927
11928/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11929FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11930{
11931 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
11932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11933 if (IEM_IS_MODRM_REG_MODE(bRm))
11934 {
11935 /*
11936 * Register, register.
11937 */
11938 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11939 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11940 IEM_MC_LOCAL(uint16_t, uValue);
11941
11942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11943 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11944 IEM_MC_PREPARE_FPU_USAGE();
11945 IEM_MC_FPU_TO_MMX_MODE();
11946
11947 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
11948 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
11949
11950 IEM_MC_ADVANCE_RIP_AND_FINISH();
11951 IEM_MC_END();
11952 }
11953 else
11954 {
11955 /*
11956 * Register, memory.
11957 */
11958 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11960 IEM_MC_LOCAL(uint16_t, uValue);
11961
11962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11963 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11965 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11966 IEM_MC_PREPARE_FPU_USAGE();
11967
11968 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11969 IEM_MC_FPU_TO_MMX_MODE();
11970 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
11971
11972 IEM_MC_ADVANCE_RIP_AND_FINISH();
11973 IEM_MC_END();
11974 }
11975}
11976
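/* A minimal sketch (not part of the build) of the word insertion pinsrw
   performs: the imm8 selects the 16-bit lane to replace, masked to 0..3
   for the 64-bit MMX form above and to 0..7 for the 128-bit SSE form
   below.  The function name is made up. */
#if 0
static void pinsrwU64Sketch(uint64_t *puDst, uint16_t uValue, uint8_t bImm)
{
    unsigned const iWord = bImm & 3;                           /* lane select */
    uint64_t const fMask = UINT64_C(0xffff) << (iWord * 16);
    *puDst = (*puDst & ~fMask) | ((uint64_t)uValue << (iWord * 16));
}
#endif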
11977
11978/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11979FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11980{
11981 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11983 if (IEM_IS_MODRM_REG_MODE(bRm))
11984 {
11985 /*
11986 * Register, register.
11987 */
11988 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11989 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11991
11992 IEM_MC_LOCAL(uint16_t, uValue);
11993 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11994 IEM_MC_PREPARE_SSE_USAGE();
11995
11996 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
11997 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
11998 IEM_MC_ADVANCE_RIP_AND_FINISH();
11999 IEM_MC_END();
12000 }
12001 else
12002 {
12003 /*
12004 * Register, memory.
12005 */
12006 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12008 IEM_MC_LOCAL(uint16_t, uValue);
12009
12010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12011 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12013 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12014 IEM_MC_PREPARE_SSE_USAGE();
12015
12016 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12017 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12018 IEM_MC_ADVANCE_RIP_AND_FINISH();
12019 IEM_MC_END();
12020 }
12021}
12022
12023
12024/* Opcode 0xf3 0x0f 0xc4 - invalid */
12025/* Opcode 0xf2 0x0f 0xc4 - invalid */
12026
12027
12028/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12029FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12030{
12031 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12033 if (IEM_IS_MODRM_REG_MODE(bRm))
12034 {
12035 /*
12036 * Greg32, MMX, imm8.
12037 */
12038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12039 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12041 IEM_MC_LOCAL(uint16_t, uValue);
12042 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12043 IEM_MC_PREPARE_FPU_USAGE();
12044 IEM_MC_FPU_TO_MMX_MODE();
12045 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12046 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12047 IEM_MC_ADVANCE_RIP_AND_FINISH();
12048 IEM_MC_END();
12049 }
12050 /* No memory operand. */
12051 else
12052 IEMOP_RAISE_INVALID_OPCODE_RET();
12053}
12054
12055
12056/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12057FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12058{
12059 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12061 if (IEM_IS_MODRM_REG_MODE(bRm))
12062 {
12063 /*
12064 * Greg32, XMM, imm8.
12065 */
12066 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12067 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12069 IEM_MC_LOCAL(uint16_t, uValue);
12070 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12071 IEM_MC_PREPARE_SSE_USAGE();
12072 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12073 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12074 IEM_MC_ADVANCE_RIP_AND_FINISH();
12075 IEM_MC_END();
12076 }
12077 /* No memory operand. */
12078 else
12079 IEMOP_RAISE_INVALID_OPCODE_RET();
12080}
12081
12082
12083/* Opcode 0xf3 0x0f 0xc5 - invalid */
12084/* Opcode 0xf2 0x0f 0xc5 - invalid */
12085
12086
12087/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12088FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12089{
12090 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12091 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12092 if (IEM_IS_MODRM_REG_MODE(bRm))
12093 {
12094 /*
12095 * XMM, XMM, imm8.
12096 */
12097 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12098 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12100 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12101 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12102 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12103 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12104 IEM_MC_PREPARE_SSE_USAGE();
12105 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12106 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12107 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12108 IEM_MC_ADVANCE_RIP_AND_FINISH();
12109 IEM_MC_END();
12110 }
12111 else
12112 {
12113 /*
12114 * XMM, [mem128], imm8.
12115 */
12116 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12117 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12118 IEM_MC_LOCAL(RTUINT128U, uSrc);
12119 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12121
12122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12123 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12124 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12127 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12128
12129 IEM_MC_PREPARE_SSE_USAGE();
12130 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12131 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12132
12133 IEM_MC_ADVANCE_RIP_AND_FINISH();
12134 IEM_MC_END();
12135 }
12136}
12137
12138
12139/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12140FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12141{
12142 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12144 if (IEM_IS_MODRM_REG_MODE(bRm))
12145 {
12146 /*
12147 * XMM, XMM, imm8.
12148 */
12149 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12150 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12152 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12153 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12154 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12155 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12156 IEM_MC_PREPARE_SSE_USAGE();
12157 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12158 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12159 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12160 IEM_MC_ADVANCE_RIP_AND_FINISH();
12161 IEM_MC_END();
12162 }
12163 else
12164 {
12165 /*
12166 * XMM, [mem128], imm8.
12167 */
12168 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12169 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12170 IEM_MC_LOCAL(RTUINT128U, uSrc);
12171 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12173
12174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12175 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12176 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12178 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12179 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12180
12181 IEM_MC_PREPARE_SSE_USAGE();
12182 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12183 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12184
12185 IEM_MC_ADVANCE_RIP_AND_FINISH();
12186 IEM_MC_END();
12187 }
12188}
12189
12190
12191/* Opcode 0xf3 0x0f 0xc6 - invalid */
12192/* Opcode 0xf2 0x0f 0xc6 - invalid */
12193
12194
12195/**
12196 * @opmaps grp9
12197 * @opcode /1
12198 * @opcodesub !11 mr/reg rex.w=0
12199 * @oppfx n/a
12200 * @opflmodify zf
12201 */
12202FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12203{
12204 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12205#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12206 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12209 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12210 \
12211 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12212 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12213 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12214 \
12215 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12216 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12217 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12218 \
12219 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12220 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12221 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12222 \
12223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12224 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12225 \
12226 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12227 IEM_MC_COMMIT_EFLAGS(EFlags); \
12228 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12229 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12230 } IEM_MC_ENDIF(); \
12231 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12232 \
12233 IEM_MC_END()
12234 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12235 {
12236 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12237 }
12238 else
12239 {
12240 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12241 }
12242}
12243
12244
12245/**
12246 * @opmaps grp9
12247 * @opcode /1
12248 * @opcodesub !11 mr/reg rex.w=1
12249 * @oppfx n/a
12250 * @opflmodify zf
12251 */
12252FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12253{
12254 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12255 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12256 {
12257 /*
12258 * This is hairy, very hairy macro fun. We're walking a fine line
12259 * here to make the code parsable by IEMAllInstPython.py and fit into
12260 * the patterns IEMAllThrdPython.py requires for the code morphing.
12261 */
12262#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12263 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12266 IEMOP_HLP_DONE_DECODING(); \
12267 \
12268 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12269 bUnmapInfoStmt; \
12270 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12271 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12272 \
12273 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12274 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12275 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12276 \
12277 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12278 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12279 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12280 \
12281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12282
12283#define BODY_CMPXCHG16B_TAIL(a_Type) \
12284 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12285 IEM_MC_COMMIT_EFLAGS(EFlags); \
12286 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12287 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12288 } IEM_MC_ENDIF(); \
12289 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12290 IEM_MC_END()
12291
12292#ifdef RT_ARCH_AMD64
12293 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12294 {
12295 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12296 {
12297 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12298 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12299 BODY_CMPXCHG16B_TAIL(RW);
12300 }
12301 else
12302 {
12303 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12304 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12305 BODY_CMPXCHG16B_TAIL(ATOMIC);
12306 }
12307 }
12308 else
12309 { /* (see comments in #else case below) */
12310 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12311 {
12312 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12314 BODY_CMPXCHG16B_TAIL(RW);
12315 }
12316 else
12317 {
12318 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12319 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12320 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12321 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12322 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12323 pEFlags, bUnmapInfo);
12324 IEM_MC_END();
12325 }
12326 }
12327
12328#elif defined(RT_ARCH_ARM64)
12329 /** @todo may require fallback for unaligned accesses... */
12330 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12331 {
12332 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12333 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12334 BODY_CMPXCHG16B_TAIL(RW);
12335 }
12336 else
12337 {
12338 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12339 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12340 BODY_CMPXCHG16B_TAIL(ATOMIC);
12341 }
12342
12343#else
12344 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12345 accesses that are not at all atomic, which works fine in a uni-CPU guest
12346 configuration (ignoring DMA). If guest SMP is active we have no choice
12347 but to use a rendezvous callback here. Sigh. */
12348 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12349 {
12350 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12351 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12352 BODY_CMPXCHG16B_TAIL(RW);
12353 }
12354 else
12355 {
12356 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12357 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12360 iemCImpl_cmpxchg16b_fallback_rendezvous,
12361 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12362 IEM_MC_END();
12363 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12364 }
12365#endif
12366
12367 #undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12368 }
12369 Log(("cmpxchg16b -> #UD\n"));
12370 IEMOP_RAISE_INVALID_OPCODE_RET();
12371}
12372
12373FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12374{
12375 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12376 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12377 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12378}
12379
12380
12381/** Opcode 0x0f 0xc7 11/6. */
12382FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12383{
12384 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12385 IEMOP_RAISE_INVALID_OPCODE_RET();
12386
12387 if (IEM_IS_MODRM_REG_MODE(bRm))
12388 {
12389 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12391 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12392 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12393 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12394 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12395 iemCImpl_rdrand, iReg, enmEffOpSize);
12396 IEM_MC_END();
12397 }
12398 /* Register only. */
12399 else
12400 IEMOP_RAISE_INVALID_OPCODE_RET();
12401}
12402
12403/** Opcode 0x0f 0xc7 !11/6. */
12404#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12405FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12406{
12407 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12408 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12409 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12410 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12411 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12413 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12414 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12415 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12416 IEM_MC_END();
12417}
12418#else
12419FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12420#endif
12421
12422/** Opcode 0x66 0x0f 0xc7 !11/6. */
12423#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12424FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12425{
12426 IEMOP_MNEMONIC(vmclear, "vmclear");
12427 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12428 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12430 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12432 IEMOP_HLP_DONE_DECODING();
12433 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12434 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12435 IEM_MC_END();
12436}
12437#else
12438FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12439#endif
12440
12441/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12442#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12443FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12444{
12445 IEMOP_MNEMONIC(vmxon, "vmxon");
12446 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12447 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12448 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12450 IEMOP_HLP_DONE_DECODING();
12451 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12452 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12453 IEM_MC_END();
12454}
12455#else
12456FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12457#endif
12458
12459/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12460#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12461FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12462{
12463 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12464 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12465 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12466 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12467 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12469 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12470 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12471 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12472 IEM_MC_END();
12473}
12474#else
12475FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12476#endif
12477
12478/** Opcode 0x0f 0xc7 11/7. */
12479FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12480{
12481 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12482 IEMOP_RAISE_INVALID_OPCODE_RET();
12483
12484 if (IEM_IS_MODRM_REG_MODE(bRm))
12485 {
12486 /* register destination. */
12487 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12489 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12490 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12491 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12492 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12493 iemCImpl_rdseed, iReg, enmEffOpSize);
12494 IEM_MC_END();
12495 }
12496 /* Register only. */
12497 else
12498 IEMOP_RAISE_INVALID_OPCODE_RET();
12499}
12500
12501/**
12502 * Group 9 jump table for register variant.
12503 */
12504IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12505{ /* pfx: none, 066h, 0f3h, 0f2h */
12506 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12507 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12508 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12509 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12510 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12511 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12512 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12513 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12514};
12515AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12516
12517
12518/**
12519 * Group 9 jump table for memory variant.
12520 */
12521IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12522{ /* pfx: none, 066h, 0f3h, 0f2h */
12523 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12524 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12525 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12526 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12527 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12528 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12529 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12530 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12531};
12532AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12533
12534
12535/** Opcode 0x0f 0xc7. */
12536FNIEMOP_DEF(iemOp_Grp9)
12537{
12538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12539 if (IEM_IS_MODRM_REG_MODE(bRm))
12540 /* register, register */
12541 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12542 + pVCpu->iem.s.idxPrefix], bRm);
12543 /* memory, register */
12544 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12545 + pVCpu->iem.s.idxPrefix], bRm);
12546}
12547
12548
12549/**
12550 * Common 'bswap register' helper.
12551 */
12552FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12553{
12554 switch (pVCpu->iem.s.enmEffOpSize)
12555 {
12556 case IEMMODE_16BIT:
12557 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12559 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12560 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12561 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12562 IEM_MC_ADVANCE_RIP_AND_FINISH();
12563 IEM_MC_END();
12564 break;
12565
12566 case IEMMODE_32BIT:
12567 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12569 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12570 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12571 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12572 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12573 IEM_MC_ADVANCE_RIP_AND_FINISH();
12574 IEM_MC_END();
12575 break;
12576
12577 case IEMMODE_64BIT:
12578 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12580 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12581 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12582 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12583 IEM_MC_ADVANCE_RIP_AND_FINISH();
12584 IEM_MC_END();
12585 break;
12586
12587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12588 }
12589}
12590
12591
12592/** Opcode 0x0f 0xc8. */
12593FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12594{
12595 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12596 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
12597 REX.X prefix, but it appears REX.B is actually the correct prefix.
12598 For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12599 IEMOP_HLP_MIN_486();
12600 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12601}
12602
12603
12604/** Opcode 0x0f 0xc9. */
12605FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12606{
12607 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12608 IEMOP_HLP_MIN_486();
12609 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12610}
12611
12612
12613/** Opcode 0x0f 0xca. */
12614FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12615{
12616 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12617 IEMOP_HLP_MIN_486();
12618 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12619}
12620
12621
12622/** Opcode 0x0f 0xcb. */
12623FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12624{
12625 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12626 IEMOP_HLP_MIN_486();
12627 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12628}
12629
12630
12631/** Opcode 0x0f 0xcc. */
12632FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12633{
12634 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12635 IEMOP_HLP_MIN_486();
12636 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12637}
12638
12639
12640/** Opcode 0x0f 0xcd. */
12641FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12642{
12643 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12644 IEMOP_HLP_MIN_486();
12645 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12646}
12647
12648
12649/** Opcode 0x0f 0xce. */
12650FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12651{
12652 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12653 IEMOP_HLP_MIN_486();
12654 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12655}
12656
12657
12658/** Opcode 0x0f 0xcf. */
12659FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12660{
12661 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12662 IEMOP_HLP_MIN_486();
12663 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12664}
12665
12666
12667/* Opcode 0x0f 0xd0 - invalid */
12668
12669
12670/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12671FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12672{
12673 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12674 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12675}
12676
12677
12678/* Opcode 0xf3 0x0f 0xd0 - invalid */
12679
12680
12681/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12682FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12683{
12684 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12685 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12686}
12687
12688
12689
12690/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12691FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12692{
12693 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12694 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12695}
12696
12697/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12698FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12699{
12700 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12701 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12702}
12703
12704/* Opcode 0xf3 0x0f 0xd1 - invalid */
12705/* Opcode 0xf2 0x0f 0xd1 - invalid */
12706
12707/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12708FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12709{
12710 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12711 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12712}
12713
12714
12715/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12716FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12717{
12718 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12719 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12720}
12721
12722
12723/* Opcode 0xf3 0x0f 0xd2 - invalid */
12724/* Opcode 0xf2 0x0f 0xd2 - invalid */
12725
12726/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12727FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12728{
12729 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12730 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12731}
12732
12733
12734/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12735FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12736{
12737 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12738 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12739}
12740
12741
12742/* Opcode 0xf3 0x0f 0xd3 - invalid */
12743/* Opcode 0xf2 0x0f 0xd3 - invalid */
12744
12745
12746/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12747FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12748{
12749 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12750 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12751}
12752
12753
12754/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12755FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12756{
12757 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12758 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddq_u128);
12759}
12760
12761
12762/* Opcode 0xf3 0x0f 0xd4 - invalid */
12763/* Opcode 0xf2 0x0f 0xd4 - invalid */
12764
12765/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12766FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12767{
12768 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12769 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12770}
12771
12772/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12773FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12774{
12775 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12776 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmullw_u128);
12777}
12778
12779
12780/* Opcode 0xf3 0x0f 0xd5 - invalid */
12781/* Opcode 0xf2 0x0f 0xd5 - invalid */
12782
12783/* Opcode 0x0f 0xd6 - invalid */
12784
12785/**
12786 * @opcode 0xd6
12787 * @oppfx 0x66
12788 * @opcpuid sse2
12789 * @opgroup og_sse2_pcksclr_datamove
12790 * @opxcpttype none
12791 * @optest op1=-1 op2=2 -> op1=2
12792 * @optest op1=0 op2=-42 -> op1=-42
12793 */
12794FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12795{
12796 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12798 if (IEM_IS_MODRM_REG_MODE(bRm))
12799 {
12800 /*
12801 * Register, register.
12802 */
12803 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12805 IEM_MC_LOCAL(uint64_t, uSrc);
12806
12807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12808 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12809
12810 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12811 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12812
12813 IEM_MC_ADVANCE_RIP_AND_FINISH();
12814 IEM_MC_END();
12815 }
12816 else
12817 {
12818 /*
12819 * Memory, register.
12820 */
12821 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12822 IEM_MC_LOCAL(uint64_t, uSrc);
12823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12824
12825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12828 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12829
12830 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12831 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12832
12833 IEM_MC_ADVANCE_RIP_AND_FINISH();
12834 IEM_MC_END();
12835 }
12836}
12837
12838
12839/**
12840 * @opcode 0xd6
12841 * @opcodesub 11 mr/reg
12842 * @oppfx f3
12843 * @opcpuid sse2
12844 * @opgroup og_sse2_simdint_datamove
12845 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12846 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12847 */
12848FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12849{
12850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12851 if (IEM_IS_MODRM_REG_MODE(bRm))
12852 {
12853 /*
12854 * Register, register.
12855 */
12856 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12857 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12859 IEM_MC_LOCAL(uint64_t, uSrc);
12860
12861 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12862 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12863 IEM_MC_FPU_TO_MMX_MODE();
12864
12865 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12866 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12867
12868 IEM_MC_ADVANCE_RIP_AND_FINISH();
12869 IEM_MC_END();
12870 }
12871
12872 /**
12873 * @opdone
12874 * @opmnemonic udf30fd6mem
12875 * @opcode 0xd6
12876 * @opcodesub !11 mr/reg
12877 * @oppfx f3
12878 * @opunused intel-modrm
12879 * @opcpuid sse
12880 * @optest ->
12881 */
12882 else
12883 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12884}
12885
12886
12887/**
12888 * @opcode 0xd6
12889 * @opcodesub 11 mr/reg
12890 * @oppfx f2
12891 * @opcpuid sse2
12892 * @opgroup og_sse2_simdint_datamove
12893 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12894 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12895 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12896 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12897 * @optest op1=-42 op2=0xfedcba9876543210
12898 * -> op1=0xfedcba9876543210 ftw=0xff
12899 */
12900FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12901{
12902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12903 if (IEM_IS_MODRM_REG_MODE(bRm))
12904 {
12905 /*
12906 * Register, register.
12907 */
12908 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12909 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12911 IEM_MC_LOCAL(uint64_t, uSrc);
12912
12913 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12914 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12915 IEM_MC_FPU_TO_MMX_MODE();
12916
12917 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12918 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12919
12920 IEM_MC_ADVANCE_RIP_AND_FINISH();
12921 IEM_MC_END();
12922 }
12923
12924 /**
12925 * @opdone
12926 * @opmnemonic udf20fd6mem
12927 * @opcode 0xd6
12928 * @opcodesub !11 mr/reg
12929 * @oppfx f2
12930 * @opunused intel-modrm
12931 * @opcpuid sse
12932 * @optest ->
12933 */
12934 else
12935 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12936}
12937
12938
12939/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12940FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12941{
12942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12943 /* Docs says register only. */
12944 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12945 {
12946 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12947 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
12948 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12950 IEM_MC_ARG(uint64_t *, puDst, 0);
12951 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12952 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12953 IEM_MC_PREPARE_FPU_USAGE();
12954 IEM_MC_FPU_TO_MMX_MODE();
12955
12956 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12957 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12958 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12959
12960 IEM_MC_ADVANCE_RIP_AND_FINISH();
12961 IEM_MC_END();
12962 }
12963 else
12964 IEMOP_RAISE_INVALID_OPCODE_RET();
12965}
12966
12967
12968 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12969FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12970{
12971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12972 /* Docs says register only. */
12973 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12974 {
12975 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12976 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
12977 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12979 IEM_MC_ARG(uint64_t *, puDst, 0);
12980 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12982 IEM_MC_PREPARE_SSE_USAGE();
12983 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12984 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12985 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12986 IEM_MC_ADVANCE_RIP_AND_FINISH();
12987 IEM_MC_END();
12988 }
12989 else
12990 IEMOP_RAISE_INVALID_OPCODE_RET();
12991}
12992
12993
12994/* Opcode 0xf3 0x0f 0xd7 - invalid */
12995/* Opcode 0xf2 0x0f 0xd7 - invalid */
12996
12997
12998/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12999FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13000{
13001 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13002 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13003}
13004
13005
13006/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13007FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13008{
13009 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13010 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13011}
13012
13013
13014/* Opcode 0xf3 0x0f 0xd8 - invalid */
13015/* Opcode 0xf2 0x0f 0xd8 - invalid */
13016
13017/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13018FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13019{
13020 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13021 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13022}
13023
13024
13025/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13026FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13027{
13028 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13029 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13030}
13031
13032
13033/* Opcode 0xf3 0x0f 0xd9 - invalid */
13034/* Opcode 0xf2 0x0f 0xd9 - invalid */
13035
13036/** Opcode 0x0f 0xda - pminub Pq, Qq */
13037FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13038{
13039 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13040 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13041}
13042
13043
13044/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13045FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13046{
13047 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13048 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13049}
13050
13051/* Opcode 0xf3 0x0f 0xda - invalid */
13052/* Opcode 0xf2 0x0f 0xda - invalid */
13053
13054/** Opcode 0x0f 0xdb - pand Pq, Qq */
13055FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13056{
13057 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13058 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13059}
13060
13061
13062/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13063FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13064{
13065 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13066 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
13067}
13068
13069
13070/* Opcode 0xf3 0x0f 0xdb - invalid */
13071/* Opcode 0xf2 0x0f 0xdb - invalid */
13072
13073/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13074FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13075{
13076 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13077 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13078}
13079
13080
13081/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13082FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13083{
13084 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13085 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusb_u128);
13086}
13087
13088
13089/* Opcode 0xf3 0x0f 0xdc - invalid */
13090/* Opcode 0xf2 0x0f 0xdc - invalid */
13091
13092/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13093FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13094{
13095 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13096 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13097}
13098
13099
13100/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13101FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13102{
13103 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13104 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusw_u128);
13105}
13106
13107
13108/* Opcode 0xf3 0x0f 0xdd - invalid */
13109/* Opcode 0xf2 0x0f 0xdd - invalid */
13110
13111/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13112FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13113{
13114 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13115 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13116}
13117
13118
13119 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13120FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13121{
13122 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13123 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13124}
13125
13126/* Opcode 0xf3 0x0f 0xde - invalid */
13127/* Opcode 0xf2 0x0f 0xde - invalid */
13128
13129
13130/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13131FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13132{
13133 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13134 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13135}
13136
13137
13138/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13139FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13140{
13141 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13142 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13143}
13144
13145
13146/* Opcode 0xf3 0x0f 0xdf - invalid */
13147/* Opcode 0xf2 0x0f 0xdf - invalid */
13148
13149/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13150FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13151{
13152 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13153 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13154}
13155
13156
13157/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13158FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13159{
13160 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13161 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13162}
13163
13164
13165/* Opcode 0xf3 0x0f 0xe0 - invalid */
13166/* Opcode 0xf2 0x0f 0xe0 - invalid */
13167
13168/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13169FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13170{
13171 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13172 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13173}
13174
13175
13176/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13177FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13178{
13179 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13180 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13181}
13182
13183
13184/* Opcode 0xf3 0x0f 0xe1 - invalid */
13185/* Opcode 0xf2 0x0f 0xe1 - invalid */
13186
13187/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13188FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13189{
13190 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13191 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13192}
13193
13194
13195/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13196FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13197{
13198 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13199 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13200}
13201
13202
13203/* Opcode 0xf3 0x0f 0xe2 - invalid */
13204/* Opcode 0xf2 0x0f 0xe2 - invalid */
13205
13206/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13207FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13208{
13209 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13210 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13211}
13212
13213
13214/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13215FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13216{
13217 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13218 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13219}
13220
13221
13222/* Opcode 0xf3 0x0f 0xe3 - invalid */
13223/* Opcode 0xf2 0x0f 0xe3 - invalid */
13224
13225/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13226FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13227{
13228 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13229 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13230}
13231
13232
13233/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13234FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13235{
13236 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13237 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13238}
13239
13240
13241/* Opcode 0xf3 0x0f 0xe4 - invalid */
13242/* Opcode 0xf2 0x0f 0xe4 - invalid */
13243
13244/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13245FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13246{
13247 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13248 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13249}
13250
13251
13252/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13253FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13254{
13255 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13256 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13257}
13258
13259
13260/* Opcode 0xf3 0x0f 0xe5 - invalid */
13261/* Opcode 0xf2 0x0f 0xe5 - invalid */
13262/* Opcode 0x0f 0xe6 - invalid */
13263
13264
13265/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13266FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13267{
13268 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13269 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13270}
13271
13272
13273/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13274FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13275{
13276 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13277 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13278}
13279
13280
13281/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13282FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13283{
13284 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13285 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13286}
13287
13288
13289/**
13290 * @opcode 0xe7
13291 * @opcodesub !11 mr/reg
13292 * @oppfx none
13293 * @opcpuid sse
13294 * @opgroup og_sse1_cachect
13295 * @opxcpttype none
13296 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13297 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13298 */
13299FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13300{
13301 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13303 if (IEM_IS_MODRM_MEM_MODE(bRm))
13304 {
13305 /* Register, memory. */
13306 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13307 IEM_MC_LOCAL(uint64_t, uSrc);
13308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13309
13310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13312 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13313 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13314 IEM_MC_FPU_TO_MMX_MODE();
13315
13316 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13317 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13318
13319 IEM_MC_ADVANCE_RIP_AND_FINISH();
13320 IEM_MC_END();
13321 }
13322 /**
13323 * @opdone
13324 * @opmnemonic ud0fe7reg
13325 * @opcode 0xe7
13326 * @opcodesub 11 mr/reg
13327 * @oppfx none
13328 * @opunused immediate
13329 * @opcpuid sse
13330 * @optest ->
13331 */
13332 else
13333 IEMOP_RAISE_INVALID_OPCODE_RET();
13334}
13335
13336/**
13337 * @opcode 0xe7
13338 * @opcodesub !11 mr/reg
13339 * @oppfx 0x66
13340 * @opcpuid sse2
13341 * @opgroup og_sse2_cachect
13342 * @opxcpttype 1
13343 * @optest op1=-1 op2=2 -> op1=2
13344 * @optest op1=0 op2=-42 -> op1=-42
13345 */
13346FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13347{
13348 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13350 if (IEM_IS_MODRM_MEM_MODE(bRm))
13351 {
13352 /* Register, memory. */
13353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13354 IEM_MC_LOCAL(RTUINT128U, uSrc);
13355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13356
13357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13361
13362 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13363 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13364
13365 IEM_MC_ADVANCE_RIP_AND_FINISH();
13366 IEM_MC_END();
13367 }
13368
13369 /**
13370 * @opdone
13371 * @opmnemonic ud660fe7reg
13372 * @opcode 0xe7
13373 * @opcodesub 11 mr/reg
13374 * @oppfx 0x66
13375 * @opunused immediate
13376 * @opcpuid sse
13377 * @optest ->
13378 */
13379 else
13380 IEMOP_RAISE_INVALID_OPCODE_RET();
13381}
13382
13383/* Opcode 0xf3 0x0f 0xe7 - invalid */
13384/* Opcode 0xf2 0x0f 0xe7 - invalid */
13385
13386
13387/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13388FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13389{
13390 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13391 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13392}
13393
13394
13395/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13396FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13397{
13398 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13399 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13400}
13401
13402
13403/* Opcode 0xf3 0x0f 0xe8 - invalid */
13404/* Opcode 0xf2 0x0f 0xe8 - invalid */
13405
13406/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13407FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13408{
13409 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13410 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
13411}
13412
13413
13414/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13415FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13416{
13417 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13418 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
13419}
13420
13421
13422/* Opcode 0xf3 0x0f 0xe9 - invalid */
13423/* Opcode 0xf2 0x0f 0xe9 - invalid */
13424
13425
13426/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13427FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13428{
13429 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13430 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
13431}
13432
13433
13434/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13435FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13436{
13437 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13438 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
13439}
13440
13441
13442/* Opcode 0xf3 0x0f 0xea - invalid */
13443/* Opcode 0xf2 0x0f 0xea - invalid */
13444
13445
13446/** Opcode 0x0f 0xeb - por Pq, Qq */
13447FNIEMOP_DEF(iemOp_por_Pq_Qq)
13448{
13449 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13450 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
13451}
13452
13453
13454/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13455FNIEMOP_DEF(iemOp_por_Vx_Wx)
13456{
13457 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13458 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
13459}
13460
13461
13462/* Opcode 0xf3 0x0f 0xeb - invalid */
13463/* Opcode 0xf2 0x0f 0xeb - invalid */
13464
13465/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13466FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13467{
13468 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13469 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
13470}
13471
13472
13473/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13474FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13475{
13476 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13477 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
13478}
13479
13480
13481/* Opcode 0xf3 0x0f 0xec - invalid */
13482/* Opcode 0xf2 0x0f 0xec - invalid */
13483
13484/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13485FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13486{
13487 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13488 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
13489}
13490
13491
13492/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13493FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13494{
13495 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13496 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
13497}
13498
13499
13500/* Opcode 0xf3 0x0f 0xed - invalid */
13501/* Opcode 0xf2 0x0f 0xed - invalid */
13502
13503
13504/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13505FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13506{
13507 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13508 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13509}
13510
13511
13512/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13513FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13514{
13515 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13516 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13517}
13518
13519
13520/* Opcode 0xf3 0x0f 0xee - invalid */
13521/* Opcode 0xf2 0x0f 0xee - invalid */
13522
13523
13524/** Opcode 0x0f 0xef - pxor Pq, Qq */
13525FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13526{
13527 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13528 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
13529}
13530
13531
13532/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13533FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13534{
13535 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13536 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
13537}
13538
13539
13540/* Opcode 0xf3 0x0f 0xef - invalid */
13541/* Opcode 0xf2 0x0f 0xef - invalid */
13542
13543/* Opcode 0x0f 0xf0 - invalid */
13544/* Opcode 0x66 0x0f 0xf0 - invalid */
13545
13546
13547/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13548FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13549{
13550 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13552 if (IEM_IS_MODRM_REG_MODE(bRm))
13553 {
13554 /*
13555 * Register, register - (not implemented, assuming it raises \#UD).
13556 */
13557 IEMOP_RAISE_INVALID_OPCODE_RET();
13558 }
13559 else
13560 {
13561 /*
13562 * Register, memory.
13563 */
13564 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13565 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13567
13568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13570 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
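    /* Like PSUBQ (0x0f 0xfb) below, the MMX form of PMULUDQ was introduced
       with SSE2, hence the _Sse2 variant of the MMX worker that checks for
       SSE2 rather than MMX support. */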
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
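/* Note: both MASKMOV forms above are still stubs; FNIEMOP_STUB generates a
   placeholder handler (presumably complaining about the stub and failing with
   a not-implemented status) rather than emulating the masked store. */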
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}

/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
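    /* Intel CPUs consume a ModR/M byte (and any SIB/displacement bytes) for
       UD0 before raising \#UD, whereas AMD CPUs raise \#UD without consuming
       anything further; model that vendor difference here. */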
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,          066h prefix,        f3h prefix,         f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
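
/*
 * Sketch (an assumption about the surrounding decoder, not code from this
 * file): the 0x0f escape handler selects a column from the last SIMD prefix
 * seen, i.e. roughly
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 *
 * with idxPrefix being 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for
 * 0xf2.  That four-way split is also why the AssertCompile above checks for
 * 256 opcodes times 4 prefix variants = 1024 entries.
 */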

/** @} */
