VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@100111

Last change on this file since 100111 was 100072, checked in by vboxsync, 18 months ago

VMM/IEM: Reworked all the IEM_MC_CALL/DEFER_TO_CIMPL macros to include some clues about what they may end up doing. The IEM_MC_DEFER_TO_CIMPL_X macros now return implicitly and are renamed to IEM_MC_DEFER_TO_CIMPL_X_RET - this will ease adding more code/whatever to follow the return from the call when recompiling and such. Also fixed buggy POP CS in 8086 mode. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 499.9 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 100072 2023-06-05 15:17:42Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


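/*
 * Usage sketch: the opcode handlers further down in this file defer to the
 * worker above, passing the matching assembly-level helper.  A minimal
 * hypothetical handler (the handler and helper names are illustrative
 * assumptions, not necessarily verbatim from this revision):
 *
 *      FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC(paddb, "paddb Pq,Qq");
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 *      }
 */

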
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


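/*
 * For reference, the difference between the two worker flavours is in the
 * helper signature.  Assuming the typedefs follow the usual IEM pattern
 * (a sketch, not verbatim from this revision), it is roughly:
 *
 *      // PFNIEMAIMPLMEDIAF2U64: also gets the FXSAVE state (FSW/MXCSR style
 *      // information), hence IEM_MC_CALL_MMX_AIMPL_2 above.
 *      typedef DECLCALLBACK(void) FNIEMAIMPLMEDIAF2U64(PCX86FXSTATE pFpuState,
 *                                                      uint64_t *puDst, uint64_t const *puSrc);
 *      // PFNIEMAIMPLMEDIAOPTF2U64: operands only, so the plain
 *      // IEM_MC_CALL_VOID_AIMPL_2 suffices.
 *      typedef DECLCALLBACK(void) FNIEMAIMPLMEDIAOPTF2U64(uint64_t *puDst, uint64_t const *puSrc);
 */

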
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


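/*
 * Usage sketch: a packed-integer SSE2 instruction would typically be wired up
 * to the worker above along these lines (hypothetical handler; the concrete
 * handlers and iemAImpl_* helper names appear later in this file and may
 * differ in detail):
 *
 *      FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb Vx,Wx");
 *          return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
 *      }
 */

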
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access for SSE, reading either the low 64 bits or
 * the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access for SSE, reading either the low 64 bits or
 * the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


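/*
 * Note on the three-argument FP pattern above: the worker writes into a local
 * IEMSSERESULT instead of straight into the destination register, so that
 * IEM_MC_STORE_SSE_RESULT can commit the value and
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT can then check the updated
 * MXCSR for unmasked SIMD FP exceptions.  Assuming the structure follows the
 * usual IEM layout (a sketch, not verbatim from this revision), it pairs the
 * result with the MXCSR:
 *
 *      typedef struct IEMSSERESULT
 *      {
 *          X86XMMREG   uResult;    // the 128-bit result value
 *          uint32_t    MXCSR;      // MXCSR after the operation
 *      } IEMSSERESULT;
 */

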
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * No alignment is enforced on the 32-bit memory operand.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * No alignment is enforced on the 64-bit memory operand.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


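/*
 * As per the r100072 commit message, IEM_MC_DEFER_TO_CIMPL_2_RET returns
 * implicitly.  Conceptually (the actual macro expansion may differ), the
 * register case above behaves roughly like:
 *
 *      return iemCImpl_sldt_reg(pVCpu, IEM_GET_INSTR_LEN(pVCpu),
 *                               IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
 *
 * i.e. the C implementation gets the instruction length so it can advance RIP
 * itself, and its status code becomes the handler's return value.
 */

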
/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


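/*
 * ModR/M recap for the group dispatch above: the byte is laid out as
 * mod(7:6), reg(5:3), rm(2:0), so IEM_GET_MODRM_REG_8 should reduce to
 * ((bRm >> 3) & 7), which indexes the eight /r encodings of group 6:
 *
 *      0f 00 /0 = sldt, /1 = str, /2 = lldt, /3 = ltr, /4 = verr, /5 = verw,
 *      /6 and /7 = invalid.
 */

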
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and a hypercall
       that isn't handled by GIM or HM will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and a hypercall
       that isn't handled by GIM or HM will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


1695/** Opcode 0x0f 0x01 /6. */
1696FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1697{
1698 /* The operand size is effectively ignored, all is 16-bit and only the
1699 lower 3-bits are used. */
1700 IEMOP_MNEMONIC(lmsw, "lmsw");
1701 IEMOP_HLP_MIN_286();
1702 if (IEM_IS_MODRM_REG_MODE(bRm))
1703 {
1704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1705 IEM_MC_BEGIN(2, 0);
1706 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1707 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1708 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1709 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1710 IEM_MC_END();
1711 }
1712 else
1713 {
1714 IEM_MC_BEGIN(2, 0);
1715 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1716 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1719 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1720 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1721 IEM_MC_END();
1722 }
1723}
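/* Informal sketch of the CR0 update iemCImpl_lmsw is expected to perform
   (illustrative only; the real worker also handles VM-exits and the
   real-to-protected mode switch):
       cr0 = (cr0    & ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS))
           | (u16Tmp &  (X86_CR0_MP | X86_CR0_EM | X86_CR0_TS))
           | ((cr0 | u16Tmp) & X86_CR0_PE);   // PE can be set, never cleared.
*/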
1724
1725
1726/** Opcode 0x0f 0x01 /7. */
1727FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1728{
1729 IEMOP_MNEMONIC(invlpg, "invlpg");
1730 IEMOP_HLP_MIN_486();
1731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1732 IEM_MC_BEGIN(1, 0);
1733 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1735 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1736 IEM_MC_END();
1737}
1738
1739
1740/** Opcode 0x0f 0x01 0xf8. */
1741FNIEMOP_DEF(iemOp_Grp7_swapgs)
1742{
1743 IEMOP_MNEMONIC(swapgs, "swapgs");
1744 IEMOP_HLP_ONLY_64BIT();
1745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1746 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1747}
1748
1749
1750/** Opcode 0x0f 0x01 0xf9. */
1751FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1752{
1753 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1756}
1757
1758
1759/**
1760 * Group 7 jump table, memory variant.
1761 */
1762IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1763{
1764 iemOp_Grp7_sgdt,
1765 iemOp_Grp7_sidt,
1766 iemOp_Grp7_lgdt,
1767 iemOp_Grp7_lidt,
1768 iemOp_Grp7_smsw,
1769 iemOp_InvalidWithRM,
1770 iemOp_Grp7_lmsw,
1771 iemOp_Grp7_invlpg
1772};
1773
1774
1775/** Opcode 0x0f 0x01. */
1776FNIEMOP_DEF(iemOp_Grp7)
1777{
1778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
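 /* With a memory operand the instruction is selected by the reg field alone
    (see g_apfnGroup7Mem above); with mod=3 the r/m field selects a
    sub-instruction as well, hence the nested switches below. */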
1779 if (IEM_IS_MODRM_MEM_MODE(bRm))
1780 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1781
1782 switch (IEM_GET_MODRM_REG_8(bRm))
1783 {
1784 case 0:
1785 switch (IEM_GET_MODRM_RM_8(bRm))
1786 {
1787 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1788 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1789 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1790 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1791 }
1792 IEMOP_RAISE_INVALID_OPCODE_RET();
1793
1794 case 1:
1795 switch (IEM_GET_MODRM_RM_8(bRm))
1796 {
1797 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1799 }
1800 IEMOP_RAISE_INVALID_OPCODE_RET();
1801
1802 case 2:
1803 switch (IEM_GET_MODRM_RM_8(bRm))
1804 {
1805 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1806 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1807 }
1808 IEMOP_RAISE_INVALID_OPCODE_RET();
1809
1810 case 3:
1811 switch (IEM_GET_MODRM_RM_8(bRm))
1812 {
1813 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1814 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1815 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1816 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1817 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1818 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1819 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1820 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1822 }
1823
1824 case 4:
1825 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1826
1827 case 5:
1828 IEMOP_RAISE_INVALID_OPCODE_RET();
1829
1830 case 6:
1831 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1832
1833 case 7:
1834 switch (IEM_GET_MODRM_RM_8(bRm))
1835 {
1836 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1837 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1838 }
1839 IEMOP_RAISE_INVALID_OPCODE_RET();
1840
1841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1842 }
1843}
1844
1845/** Opcode 0x0f 0x00 /3. */
1846FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1847{
1848 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1850
1851 if (IEM_IS_MODRM_REG_MODE(bRm))
1852 {
1853 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1854 switch (pVCpu->iem.s.enmEffOpSize)
1855 {
1856 case IEMMODE_16BIT:
1857 {
1858 IEM_MC_BEGIN(3, 0);
1859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1860 IEM_MC_ARG(uint16_t, u16Sel, 1);
1861 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1862
1863 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1864 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1865 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1866
1867 IEM_MC_END();
1868 }
1869
1870 case IEMMODE_32BIT:
1871 case IEMMODE_64BIT:
1872 {
1873 IEM_MC_BEGIN(3, 0);
1874 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1875 IEM_MC_ARG(uint16_t, u16Sel, 1);
1876 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1877
1878 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1879 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1880 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1881
1882 IEM_MC_END();
1883 }
1884
1885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1886 }
1887 }
1888 else
1889 {
1890 switch (pVCpu->iem.s.enmEffOpSize)
1891 {
1892 case IEMMODE_16BIT:
1893 {
1894 IEM_MC_BEGIN(3, 1);
1895 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1896 IEM_MC_ARG(uint16_t, u16Sel, 1);
1897 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1899
1900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1901 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1902
1903 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1904 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1905 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1906
1907 IEM_MC_END();
1908 }
1909
1910 case IEMMODE_32BIT:
1911 case IEMMODE_64BIT:
1912 {
1913 IEM_MC_BEGIN(3, 1);
1914 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1915 IEM_MC_ARG(uint16_t, u16Sel, 1);
1916 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1918
1919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1920 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1921/** @todo testcase: make sure it's a 16-bit read. */
1922
1923 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1924 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1925 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1926
1927 IEM_MC_END();
1928 }
1929
1930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1931 }
1932 }
1933}
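/* Informal sketch of what the deferred iemCImpl_LarLsl_u16/u64 workers do,
   assuming the architectural LAR/LSL definition (not a quote of the actual
   implementation):
       if (u16Sel can be examined at the current CPL/RPL)
       {
           EFLAGS.ZF = 1;
           *puDst    = fIsLar ? access rights bytes : segment limit;
       }
       else
           EFLAGS.ZF = 0;   // destination left unchanged
*/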
1934
1935
1936
1937/** Opcode 0x0f 0x02. */
1938FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1939{
1940 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1941 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1942}
1943
1944
1945/** Opcode 0x0f 0x03. */
1946FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1947{
1948 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1949 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1950}
1951
1952
1953/** Opcode 0x0f 0x05. */
1954FNIEMOP_DEF(iemOp_syscall)
1955{
1956 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1958 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1959 iemCImpl_syscall);
1960}
1961
1962
1963/** Opcode 0x0f 0x06. */
1964FNIEMOP_DEF(iemOp_clts)
1965{
1966 IEMOP_MNEMONIC(clts, "clts");
1967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1968 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1969}
1970
1971
1972/** Opcode 0x0f 0x07. */
1973FNIEMOP_DEF(iemOp_sysret)
1974{
1975 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1977 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1978 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1979}
1980
1981
1982/** Opcode 0x0f 0x08. */
1983FNIEMOP_DEF(iemOp_invd)
1984{
1985 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1986 IEMOP_HLP_MIN_486();
1987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1988 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
1989}
1990
1991
1992/** Opcode 0x0f 0x09. */
1993FNIEMOP_DEF(iemOp_wbinvd)
1994{
1995 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1996 IEMOP_HLP_MIN_486();
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
1999}
2000
2001
2002/** Opcode 0x0f 0x0b. */
2003FNIEMOP_DEF(iemOp_ud2)
2004{
2005 IEMOP_MNEMONIC(ud2, "ud2");
2006 IEMOP_RAISE_INVALID_OPCODE_RET();
2007}
2008
2009/** Opcode 0x0f 0x0d. */
2010FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2011{
2012 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2013 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2014 {
2015 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2016 IEMOP_RAISE_INVALID_OPCODE_RET();
2017 }
2018
2019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2020 if (IEM_IS_MODRM_REG_MODE(bRm))
2021 {
2022 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2023 IEMOP_RAISE_INVALID_OPCODE_RET();
2024 }
2025
2026 switch (IEM_GET_MODRM_REG_8(bRm))
2027 {
2028 case 2: /* Aliased to /0 for the time being. */
2029 case 4: /* Aliased to /0 for the time being. */
2030 case 5: /* Aliased to /0 for the time being. */
2031 case 6: /* Aliased to /0 for the time being. */
2032 case 7: /* Aliased to /0 for the time being. */
2033 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2034 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2035 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2037 }
2038
2039 IEM_MC_BEGIN(0, 1);
2040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2043 /* Currently a NOP. */
2044 NOREF(GCPtrEffSrc);
2045 IEM_MC_ADVANCE_RIP_AND_FINISH();
2046 IEM_MC_END();
2047}
2048
2049
2050/** Opcode 0x0f 0x0e. */
2051FNIEMOP_DEF(iemOp_femms)
2052{
2053 IEMOP_MNEMONIC(femms, "femms");
2054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2055
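 /* FEMMS is AMD's faster 3DNow! alternative to EMMS; as emulated here the
    observable effect is simply leaving MMX mode. */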
2056 IEM_MC_BEGIN(0, 0);
2057 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2058 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2059 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2060 IEM_MC_FPU_FROM_MMX_MODE();
2061 IEM_MC_ADVANCE_RIP_AND_FINISH();
2062 IEM_MC_END();
2063}
2064
2065
2066/** Opcode 0x0f 0x0f. */
2067FNIEMOP_DEF(iemOp_3Dnow)
2068{
2069 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2070 {
2071 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2072 IEMOP_RAISE_INVALID_OPCODE_RET();
2073 }
2074
2075#ifdef IEM_WITH_3DNOW
2076 /* This is pretty sparse, use switch instead of table. */
2077 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2078 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2079#else
2080 IEMOP_BITCH_ABOUT_STUB();
2081 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2082#endif
2083}
2084
2085
2086/**
2087 * @opcode 0x10
2088 * @oppfx none
2089 * @opcpuid sse
2090 * @opgroup og_sse_simdfp_datamove
2091 * @opxcpttype 4UA
2092 * @optest op1=1 op2=2 -> op1=2
2093 * @optest op1=0 op2=-22 -> op1=-22
2094 */
2095FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2096{
2097 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2099 if (IEM_IS_MODRM_REG_MODE(bRm))
2100 {
2101 /*
2102 * XMM128, XMM128.
2103 */
2104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2105 IEM_MC_BEGIN(0, 0);
2106 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2107 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2108 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2109 IEM_GET_MODRM_RM(pVCpu, bRm));
2110 IEM_MC_ADVANCE_RIP_AND_FINISH();
2111 IEM_MC_END();
2112 }
2113 else
2114 {
2115 /*
2116 * XMM128, [mem128].
2117 */
2118 IEM_MC_BEGIN(0, 2);
2119 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2121
2122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126
2127 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2128 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2129
2130 IEM_MC_ADVANCE_RIP_AND_FINISH();
2131 IEM_MC_END();
2132 }
2134}
2135
2136
2137/**
2138 * @opcode 0x10
2139 * @oppfx 0x66
2140 * @opcpuid sse2
2141 * @opgroup og_sse2_pcksclr_datamove
2142 * @opxcpttype 4UA
2143 * @optest op1=1 op2=2 -> op1=2
2144 * @optest op1=0 op2=-42 -> op1=-42
2145 */
2146FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2147{
2148 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2150 if (IEM_IS_MODRM_REG_MODE(bRm))
2151 {
2152 /*
2153 * XMM128, XMM128.
2154 */
2155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2156 IEM_MC_BEGIN(0, 0);
2157 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2158 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2159 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2160 IEM_GET_MODRM_RM(pVCpu, bRm));
2161 IEM_MC_ADVANCE_RIP_AND_FINISH();
2162 IEM_MC_END();
2163 }
2164 else
2165 {
2166 /*
2167 * XMM128, [mem128].
2168 */
2169 IEM_MC_BEGIN(0, 2);
2170 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2175 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184}
2185
2186
2187/**
2188 * @opcode 0x10
2189 * @oppfx 0xf3
2190 * @opcpuid sse
2191 * @opgroup og_sse_simdfp_datamove
2192 * @opxcpttype 5
2193 * @optest op1=1 op2=2 -> op1=2
2194 * @optest op1=0 op2=-22 -> op1=-22
2195 */
2196FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2197{
2198 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 if (IEM_IS_MODRM_REG_MODE(bRm))
2201 {
2202 /*
2203 * XMM32, XMM32.
2204 */
2205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2206 IEM_MC_BEGIN(0, 1);
2207 IEM_MC_LOCAL(uint32_t, uSrc);
2208
2209 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2210 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
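 /* Note: the register form merges only the low dword; bits 32 thru 127 of
    the destination are preserved, unlike the memory form below which
    zero-extends. */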
2211 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2212 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2213
2214 IEM_MC_ADVANCE_RIP_AND_FINISH();
2215 IEM_MC_END();
2216 }
2217 else
2218 {
2219 /*
2220 * XMM128, [mem32].
2221 */
2222 IEM_MC_BEGIN(0, 2);
2223 IEM_MC_LOCAL(uint32_t, uSrc);
2224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2225
2226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2230
2231 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2232 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237}
2238
2239
2240/**
2241 * @opcode 0x10
2242 * @oppfx 0xf2
2243 * @opcpuid sse2
2244 * @opgroup og_sse2_pcksclr_datamove
2245 * @opxcpttype 5
2246 * @optest op1=1 op2=2 -> op1=2
2247 * @optest op1=0 op2=-42 -> op1=-42
2248 */
2249FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2250{
2251 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2253 if (IEM_IS_MODRM_REG_MODE(bRm))
2254 {
2255 /*
2256 * XMM64, XMM64.
2257 */
2258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2259 IEM_MC_BEGIN(0, 1);
2260 IEM_MC_LOCAL(uint64_t, uSrc);
2261
2262 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
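 /* Note: the register form merges only the low qword; the high qword of the
    destination is preserved, unlike the memory form below which
    zero-extends. */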
2264 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2265 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2266
2267 IEM_MC_ADVANCE_RIP_AND_FINISH();
2268 IEM_MC_END();
2269 }
2270 else
2271 {
2272 /*
2273 * XMM128, [mem64].
2274 */
2275 IEM_MC_BEGIN(0, 2);
2276 IEM_MC_LOCAL(uint64_t, uSrc);
2277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2278
2279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2281 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2282 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2283
2284 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2285 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2286
2287 IEM_MC_ADVANCE_RIP_AND_FINISH();
2288 IEM_MC_END();
2289 }
2290}
2291
2292
2293/**
2294 * @opcode 0x11
2295 * @oppfx none
2296 * @opcpuid sse
2297 * @opgroup og_sse_simdfp_datamove
2298 * @opxcpttype 4UA
2299 * @optest op1=1 op2=2 -> op1=2
2300 * @optest op1=0 op2=-42 -> op1=-42
2301 */
2302FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2303{
2304 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2306 if (IEM_IS_MODRM_REG_MODE(bRm))
2307 {
2308 /*
2309 * XMM128, XMM128.
2310 */
2311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2312 IEM_MC_BEGIN(0, 0);
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2315 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2316 IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_ADVANCE_RIP_AND_FINISH();
2318 IEM_MC_END();
2319 }
2320 else
2321 {
2322 /*
2323 * [mem128], XMM128.
2324 */
2325 IEM_MC_BEGIN(0, 2);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2328
2329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2333
2334 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2335 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2336
2337 IEM_MC_ADVANCE_RIP_AND_FINISH();
2338 IEM_MC_END();
2339 }
2340}
2341
2342
2343/**
2344 * @opcode 0x11
2345 * @oppfx 0x66
2346 * @opcpuid sse2
2347 * @opgroup og_sse2_pcksclr_datamove
2348 * @opxcpttype 4UA
2349 * @optest op1=1 op2=2 -> op1=2
2350 * @optest op1=0 op2=-42 -> op1=-42
2351 */
2352FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2353{
2354 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2356 if (IEM_IS_MODRM_REG_MODE(bRm))
2357 {
2358 /*
2359 * XMM128, XMM128.
2360 */
2361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2362 IEM_MC_BEGIN(0, 0);
2363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2365 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2366 IEM_GET_MODRM_REG(pVCpu, bRm));
2367 IEM_MC_ADVANCE_RIP_AND_FINISH();
2368 IEM_MC_END();
2369 }
2370 else
2371 {
2372 /*
2373 * [mem128], XMM128.
2374 */
2375 IEM_MC_BEGIN(0, 2);
2376 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2378
2379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2381 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2383
2384 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2385 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390}
2391
2392
2393/**
2394 * @opcode 0x11
2395 * @oppfx 0xf3
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 5
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-22 -> op1=-22
2401 */
2402FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if (IEM_IS_MODRM_REG_MODE(bRm))
2407 {
2408 /*
2409 * XMM32, XMM32.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2412 IEM_MC_BEGIN(0, 1);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414
2415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2417 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2418 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2419
2420 IEM_MC_ADVANCE_RIP_AND_FINISH();
2421 IEM_MC_END();
2422 }
2423 else
2424 {
2425 /*
2426 * [mem32], XMM32.
2427 */
2428 IEM_MC_BEGIN(0, 2);
2429 IEM_MC_LOCAL(uint32_t, uSrc);
2430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2431
2432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2436
2437 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2438 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443}
2444
2445
2446/**
2447 * @opcode 0x11
2448 * @oppfx 0xf2
2449 * @opcpuid sse2
2450 * @opgroup og_sse2_pcksclr_datamove
2451 * @opxcpttype 5
2452 * @optest op1=1 op2=2 -> op1=2
2453 * @optest op1=0 op2=-42 -> op1=-42
2454 */
2455FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2456{
2457 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2459 if (IEM_IS_MODRM_REG_MODE(bRm))
2460 {
2461 /*
2462 * XMM64, XMM64.
2463 */
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2465 IEM_MC_BEGIN(0, 1);
2466 IEM_MC_LOCAL(uint64_t, uSrc);
2467
2468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2470 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2471 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2472
2473 IEM_MC_ADVANCE_RIP_AND_FINISH();
2474 IEM_MC_END();
2475 }
2476 else
2477 {
2478 /*
2479 * [mem64], XMM64.
2480 */
2481 IEM_MC_BEGIN(0, 2);
2482 IEM_MC_LOCAL(uint64_t, uSrc);
2483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2484
2485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2489
2490 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2491 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496}
2497
2498
2499FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2500{
2501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /**
2505 * @opcode 0x12
2506 * @opcodesub 11 mr/reg
2507 * @oppfx none
2508 * @opcpuid sse
2509 * @opgroup og_sse_simdfp_datamove
2510 * @opxcpttype 5
2511 * @optest op1=1 op2=2 -> op1=2
2512 * @optest op1=0 op2=-42 -> op1=-42
2513 */
2514 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2515
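 /* movhlps: the high qword of the source replaces the low qword of the
    destination; the destination's high qword is preserved. */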
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2517 IEM_MC_BEGIN(0, 1);
2518 IEM_MC_LOCAL(uint64_t, uSrc);
2519
2520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2522 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2523 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2524
2525 IEM_MC_ADVANCE_RIP_AND_FINISH();
2526 IEM_MC_END();
2527 }
2528 else
2529 {
2530 /**
2531 * @opdone
2532 * @opcode 0x12
2533 * @opcodesub !11 mr/reg
2534 * @oppfx none
2535 * @opcpuid sse
2536 * @opgroup og_sse_simdfp_datamove
2537 * @opxcpttype 5
2538 * @optest op1=1 op2=2 -> op1=2
2539 * @optest op1=0 op2=-42 -> op1=-42
2540 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2541 */
2542 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2543
2544 IEM_MC_BEGIN(0, 2);
2545 IEM_MC_LOCAL(uint64_t, uSrc);
2546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2547
2548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2552
2553 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2554 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2555
2556 IEM_MC_ADVANCE_RIP_AND_FINISH();
2557 IEM_MC_END();
2558 }
2559}
2560
2561
2562/**
2563 * @opcode 0x12
2564 * @opcodesub !11 mr/reg
2565 * @oppfx 0x66
2566 * @opcpuid sse2
2567 * @opgroup og_sse2_pcksclr_datamove
2568 * @opxcpttype 5
2569 * @optest op1=1 op2=2 -> op1=2
2570 * @optest op1=0 op2=-42 -> op1=-42
2571 */
2572FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2573{
2574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2575 if (IEM_IS_MODRM_MEM_MODE(bRm))
2576 {
2577 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2578
2579 IEM_MC_BEGIN(0, 2);
2580 IEM_MC_LOCAL(uint64_t, uSrc);
2581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2582
2583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2585 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2587
2588 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2589 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2590
2591 IEM_MC_ADVANCE_RIP_AND_FINISH();
2592 IEM_MC_END();
2593 }
2594
2595 /**
2596 * @opdone
2597 * @opmnemonic ud660f12m3
2598 * @opcode 0x12
2599 * @opcodesub 11 mr/reg
2600 * @oppfx 0x66
2601 * @opunused immediate
2602 * @opcpuid sse
2603 * @optest ->
2604 */
2605 else
2606 IEMOP_RAISE_INVALID_OPCODE_RET();
2607}
2608
2609
2610/**
2611 * @opcode 0x12
2612 * @oppfx 0xf3
2613 * @opcpuid sse3
2614 * @opgroup og_sse3_pcksclr_datamove
2615 * @opxcpttype 4
2616 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2617 * op1=0x00000002000000020000000100000001
2618 */
2619FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2620{
2621 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2623 if (IEM_IS_MODRM_REG_MODE(bRm))
2624 {
2625 /*
2626 * XMM, XMM.
2627 */
2628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2629 IEM_MC_BEGIN(0, 1);
2630 IEM_MC_LOCAL(RTUINT128U, uSrc);
2631
2632 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2633 IEM_MC_PREPARE_SSE_USAGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
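 /* Duplicate the even dwords: dst = { src.dw0, src.dw0, src.dw2, src.dw2 }. */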
2636 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2637 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2638 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2639 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2640
2641 IEM_MC_ADVANCE_RIP_AND_FINISH();
2642 IEM_MC_END();
2643 }
2644 else
2645 {
2646 /*
2647 * XMM, [mem128].
2648 */
2649 IEM_MC_BEGIN(0, 2);
2650 IEM_MC_LOCAL(RTUINT128U, uSrc);
2651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2656 IEM_MC_PREPARE_SSE_USAGE();
2657
2658 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2659 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2660 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2661 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2662 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2663
2664 IEM_MC_ADVANCE_RIP_AND_FINISH();
2665 IEM_MC_END();
2666 }
2667}
2668
2669
2670/**
2671 * @opcode 0x12
2672 * @oppfx 0xf2
2673 * @opcpuid sse3
2674 * @opgroup og_sse3_pcksclr_datamove
2675 * @opxcpttype 5
2676 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2677 * op1=0x22222222111111112222222211111111
2678 */
2679FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2680{
2681 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2683 if (IEM_IS_MODRM_REG_MODE(bRm))
2684 {
2685 /*
2686 * XMM128, XMM64.
2687 */
2688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2689 IEM_MC_BEGIN(1, 0);
2690 IEM_MC_ARG(uint64_t, uSrc, 0);
2691
2692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2693 IEM_MC_PREPARE_SSE_USAGE();
2694
2695 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
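 /* Broadcast the low qword into both halves of the destination. */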
2696 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2697 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 /*
2705 * XMM128, [mem64].
2706 */
2707 IEM_MC_BEGIN(1, 1);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709 IEM_MC_ARG(uint64_t, uSrc, 0);
2710
2711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2713 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2714 IEM_MC_PREPARE_SSE_USAGE();
2715
2716 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2717 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2718 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2719
2720 IEM_MC_ADVANCE_RIP_AND_FINISH();
2721 IEM_MC_END();
2722 }
2723}
2724
2725
2726/**
2727 * @opcode 0x13
2728 * @opcodesub !11 mr/reg
2729 * @oppfx none
2730 * @opcpuid sse
2731 * @opgroup og_sse_simdfp_datamove
2732 * @opxcpttype 5
2733 * @optest op1=1 op2=2 -> op1=2
2734 * @optest op1=0 op2=-42 -> op1=-42
2735 */
2736FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2737{
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if (IEM_IS_MODRM_MEM_MODE(bRm))
2740 {
2741 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2742
2743 IEM_MC_BEGIN(0, 2);
2744 IEM_MC_LOCAL(uint64_t, uSrc);
2745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2746
2747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2749 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2750 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2751
2752 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2753 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2754
2755 IEM_MC_ADVANCE_RIP_AND_FINISH();
2756 IEM_MC_END();
2757 }
2758
2759 /**
2760 * @opdone
2761 * @opmnemonic ud0f13m3
2762 * @opcode 0x13
2763 * @opcodesub 11 mr/reg
2764 * @oppfx none
2765 * @opunused immediate
2766 * @opcpuid sse
2767 * @optest ->
2768 */
2769 else
2770 IEMOP_RAISE_INVALID_OPCODE_RET();
2771}
2772
2773
2774/**
2775 * @opcode 0x13
2776 * @opcodesub !11 mr/reg
2777 * @oppfx 0x66
2778 * @opcpuid sse2
2779 * @opgroup og_sse2_pcksclr_datamove
2780 * @opxcpttype 5
2781 * @optest op1=1 op2=2 -> op1=2
2782 * @optest op1=0 op2=-42 -> op1=-42
2783 */
2784FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2785{
2786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2787 if (IEM_IS_MODRM_MEM_MODE(bRm))
2788 {
2789 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2790 IEM_MC_BEGIN(0, 2);
2791 IEM_MC_LOCAL(uint64_t, uSrc);
2792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2793
2794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2796 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2798
2799 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2800 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2801
2802 IEM_MC_ADVANCE_RIP_AND_FINISH();
2803 IEM_MC_END();
2804 }
2805
2806 /**
2807 * @opdone
2808 * @opmnemonic ud660f13m3
2809 * @opcode 0x13
2810 * @opcodesub 11 mr/reg
2811 * @oppfx 0x66
2812 * @opunused immediate
2813 * @opcpuid sse
2814 * @optest ->
2815 */
2816 else
2817 IEMOP_RAISE_INVALID_OPCODE_RET();
2818}
2819
2820
2821/**
2822 * @opmnemonic udf30f13
2823 * @opcode 0x13
2824 * @oppfx 0xf3
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/**
2832 * @opmnemonic udf20f13
2833 * @opcode 0x13
2834 * @oppfx 0xf2
2835 * @opunused intel-modrm
2836 * @opcpuid sse
2837 * @optest ->
2838 * @opdone
2839 */
2840
2841/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2842FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2843{
2844 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2845 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2846}
2847
2848
2849/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2850FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2851{
2852 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2853 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2854}
2855
2856
2857/**
2858 * @opdone
2859 * @opmnemonic udf30f14
2860 * @opcode 0x14
2861 * @oppfx 0xf3
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/**
2869 * @opmnemonic udf20f14
2870 * @opcode 0x14
2871 * @oppfx 0xf2
2872 * @opunused intel-modrm
2873 * @opcpuid sse
2874 * @optest ->
2875 * @opdone
2876 */
2877
2878/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2879FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2880{
2881 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2882 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2883}
2884
2885
2886/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2887FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2888{
2889 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2890 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2891}
2892
2893
2894/* Opcode 0xf3 0x0f 0x15 - invalid */
2895/* Opcode 0xf2 0x0f 0x15 - invalid */
2896
2897/**
2898 * @opdone
2899 * @opmnemonic udf30f15
2900 * @opcode 0x15
2901 * @oppfx 0xf3
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
2908/**
2909 * @opmnemonic udf20f15
2910 * @opcode 0x15
2911 * @oppfx 0xf2
2912 * @opunused intel-modrm
2913 * @opcpuid sse
2914 * @optest ->
2915 * @opdone
2916 */
2917
2918FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2919{
2920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2921 if (IEM_IS_MODRM_REG_MODE(bRm))
2922 {
2923 /**
2924 * @opcode 0x16
2925 * @opcodesub 11 mr/reg
2926 * @oppfx none
2927 * @opcpuid sse
2928 * @opgroup og_sse_simdfp_datamove
2929 * @opxcpttype 5
2930 * @optest op1=1 op2=2 -> op1=2
2931 * @optest op1=0 op2=-42 -> op1=-42
2932 */
2933 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2934
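 /* movlhps: the low qword of the source replaces the high qword of the
    destination; the destination's low qword is preserved. */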
2935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2936 IEM_MC_BEGIN(0, 1);
2937 IEM_MC_LOCAL(uint64_t, uSrc);
2938
2939 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2941 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2942 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2943
2944 IEM_MC_ADVANCE_RIP_AND_FINISH();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /**
2950 * @opdone
2951 * @opcode 0x16
2952 * @opcodesub !11 mr/reg
2953 * @oppfx none
2954 * @opcpuid sse
2955 * @opgroup og_sse_simdfp_datamove
2956 * @opxcpttype 5
2957 * @optest op1=1 op2=2 -> op1=2
2958 * @optest op1=0 op2=-42 -> op1=-42
2959 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2960 */
2961 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2962
2963 IEM_MC_BEGIN(0, 2);
2964 IEM_MC_LOCAL(uint64_t, uSrc);
2965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2966
2967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2969 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2970 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2971
2972 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2973 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2974
2975 IEM_MC_ADVANCE_RIP_AND_FINISH();
2976 IEM_MC_END();
2977 }
2978}
2979
2980
2981/**
2982 * @opcode 0x16
2983 * @opcodesub !11 mr/reg
2984 * @oppfx 0x66
2985 * @opcpuid sse2
2986 * @opgroup og_sse2_pcksclr_datamove
2987 * @opxcpttype 5
2988 * @optest op1=1 op2=2 -> op1=2
2989 * @optest op1=0 op2=-42 -> op1=-42
2990 */
2991FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2992{
2993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2994 if (IEM_IS_MODRM_MEM_MODE(bRm))
2995 {
2996 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2997 IEM_MC_BEGIN(0, 2);
2998 IEM_MC_LOCAL(uint64_t, uSrc);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3000
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3004 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3005
3006 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3007 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3008
3009 IEM_MC_ADVANCE_RIP_AND_FINISH();
3010 IEM_MC_END();
3011 }
3012
3013 /**
3014 * @opdone
3015 * @opmnemonic ud660f16m3
3016 * @opcode 0x16
3017 * @opcodesub 11 mr/reg
3018 * @oppfx 0x66
3019 * @opunused immediate
3020 * @opcpuid sse
3021 * @optest ->
3022 */
3023 else
3024 IEMOP_RAISE_INVALID_OPCODE_RET();
3025}
3026
3027
3028/**
3029 * @opcode 0x16
3030 * @oppfx 0xf3
3031 * @opcpuid sse3
3032 * @opgroup og_sse3_pcksclr_datamove
3033 * @opxcpttype 4
3034 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3035 * op1=0x00000002000000020000000100000001
3036 */
3037FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3038{
3039 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041 if (IEM_IS_MODRM_REG_MODE(bRm))
3042 {
3043 /*
3044 * XMM128, XMM128.
3045 */
3046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3047 IEM_MC_BEGIN(0, 1);
3048 IEM_MC_LOCAL(RTUINT128U, uSrc);
3049
3050 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3051 IEM_MC_PREPARE_SSE_USAGE();
3052
3053 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
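 /* Duplicate the odd dwords: dst = { src.dw1, src.dw1, src.dw3, src.dw3 }. */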
3054 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3055 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3056 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3057 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3058
3059 IEM_MC_ADVANCE_RIP_AND_FINISH();
3060 IEM_MC_END();
3061 }
3062 else
3063 {
3064 /*
3065 * XMM128, [mem128].
3066 */
3067 IEM_MC_BEGIN(0, 2);
3068 IEM_MC_LOCAL(RTUINT128U, uSrc);
3069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3070
3071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3073 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3074 IEM_MC_PREPARE_SSE_USAGE();
3075
3076 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3077 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3078 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3079 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3080 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3081
3082 IEM_MC_ADVANCE_RIP_AND_FINISH();
3083 IEM_MC_END();
3084 }
3085}
3086
3087/**
3088 * @opdone
3089 * @opmnemonic udf20f16
3090 * @opcode 0x16
3091 * @oppfx 0xf2
3092 * @opunused intel-modrm
3093 * @opcpuid sse
3094 * @optest ->
3095 * @opdone
3096 */
3097
3098
3099/**
3100 * @opcode 0x17
3101 * @opcodesub !11 mr/reg
3102 * @oppfx none
3103 * @opcpuid sse
3104 * @opgroup og_sse_simdfp_datamove
3105 * @opxcpttype 5
3106 * @optest op1=1 op2=2 -> op1=2
3107 * @optest op1=0 op2=-42 -> op1=-42
3108 */
3109FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3110{
3111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3112 if (IEM_IS_MODRM_MEM_MODE(bRm))
3113 {
3114 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3115
3116 IEM_MC_BEGIN(0, 2);
3117 IEM_MC_LOCAL(uint64_t, uSrc);
3118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3119
3120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3124
3125 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3126 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3127
3128 IEM_MC_ADVANCE_RIP_AND_FINISH();
3129 IEM_MC_END();
3130 }
3131
3132 /**
3133 * @opdone
3134 * @opmnemonic ud0f17m3
3135 * @opcode 0x17
3136 * @opcodesub 11 mr/reg
3137 * @oppfx none
3138 * @opunused immediate
3139 * @opcpuid sse
3140 * @optest ->
3141 */
3142 else
3143 IEMOP_RAISE_INVALID_OPCODE_RET();
3144}
3145
3146
3147/**
3148 * @opcode 0x17
3149 * @opcodesub !11 mr/reg
3150 * @oppfx 0x66
3151 * @opcpuid sse2
3152 * @opgroup og_sse2_pcksclr_datamove
3153 * @opxcpttype 5
3154 * @optest op1=1 op2=2 -> op1=2
3155 * @optest op1=0 op2=-42 -> op1=-42
3156 */
3157FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3158{
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 if (IEM_IS_MODRM_MEM_MODE(bRm))
3161 {
3162 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3163
3164 IEM_MC_BEGIN(0, 2);
3165 IEM_MC_LOCAL(uint64_t, uSrc);
3166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3167
3168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3170 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3172
3173 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3174 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3175
3176 IEM_MC_ADVANCE_RIP_AND_FINISH();
3177 IEM_MC_END();
3178 }
3179
3180 /**
3181 * @opdone
3182 * @opmnemonic ud660f17m3
3183 * @opcode 0x17
3184 * @opcodesub 11 mr/reg
3185 * @oppfx 0x66
3186 * @opunused immediate
3187 * @opcpuid sse
3188 * @optest ->
3189 */
3190 else
3191 IEMOP_RAISE_INVALID_OPCODE_RET();
3192}
3193
3194
3195/**
3196 * @opdone
3197 * @opmnemonic udf30f17
3198 * @opcode 0x17
3199 * @oppfx 0xf3
3200 * @opunused intel-modrm
3201 * @opcpuid sse
3202 * @optest ->
3203 * @opdone
3204 */
3205
3206/**
3207 * @opmnemonic udf20f17
3208 * @opcode 0x17
3209 * @oppfx 0xf2
3210 * @opunused intel-modrm
3211 * @opcpuid sse
3212 * @optest ->
3213 * @opdone
3214 */
3215
3216
3217/** Opcode 0x0f 0x18. */
3218FNIEMOP_DEF(iemOp_prefetch_Grp16)
3219{
3220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3221 if (IEM_IS_MODRM_MEM_MODE(bRm))
3222 {
3223 switch (IEM_GET_MODRM_REG_8(bRm))
3224 {
3225 case 4: /* Aliased to /0 for the time being according to AMD. */
3226 case 5: /* Aliased to /0 for the time being according to AMD. */
3227 case 6: /* Aliased to /0 for the time being according to AMD. */
3228 case 7: /* Aliased to /0 for the time being according to AMD. */
3229 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3230 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3231 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3232 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3234 }
3235
3236 IEM_MC_BEGIN(0, 1);
3237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 /* Currently a NOP. */
3241 NOREF(GCPtrEffSrc);
3242 IEM_MC_ADVANCE_RIP_AND_FINISH();
3243 IEM_MC_END();
3244 }
3245 else
3246 IEMOP_RAISE_INVALID_OPCODE_RET();
3247}
3248
3249
3250/** Opcode 0x0f 0x19..0x1f. */
3251FNIEMOP_DEF(iemOp_nop_Ev)
3252{
3253 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3255 if (IEM_IS_MODRM_REG_MODE(bRm))
3256 {
3257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3258 IEM_MC_BEGIN(0, 0);
3259 IEM_MC_ADVANCE_RIP_AND_FINISH();
3260 IEM_MC_END();
3261 }
3262 else
3263 {
3264 IEM_MC_BEGIN(0, 1);
3265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3268 /* Currently a NOP. */
3269 NOREF(GCPtrEffSrc);
3270 IEM_MC_ADVANCE_RIP_AND_FINISH();
3271 IEM_MC_END();
3272 }
3273}
3274
3275
3276/** Opcode 0x0f 0x20. */
3277FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3278{
3279 /* mod is ignored, as are operand size overrides. */
3280 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3281 IEMOP_HLP_MIN_386();
3282 if (IEM_IS_64BIT_CODE(pVCpu))
3283 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3284 else
3285 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3286
3287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3288 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3290 {
3291 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3292 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3293 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3294 iCrReg |= 8;
3295 }
3296 switch (iCrReg)
3297 {
3298 case 0: case 2: case 3: case 4: case 8:
3299 break;
3300 default:
3301 IEMOP_RAISE_INVALID_OPCODE_RET();
3302 }
3303 IEMOP_HLP_DONE_DECODING();
3304
3305 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3306}
3307
3308
3309/** Opcode 0x0f 0x21. */
3310FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3311{
3312 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3313 IEMOP_HLP_MIN_386();
3314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3316 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3317 IEMOP_RAISE_INVALID_OPCODE_RET();
3318 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3319}
3320
3321
3322/** Opcode 0x0f 0x22. */
3323FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3324{
3325 /* mod is ignored, as are operand size overrides. */
3326 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3327 IEMOP_HLP_MIN_386();
3328 if (IEM_IS_64BIT_CODE(pVCpu))
3329 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3330 else
3331 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3332
3333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3334 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3335 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3336 {
3337 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3338 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3339 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3340 iCrReg |= 8;
3341 }
3342 switch (iCrReg)
3343 {
3344 case 0: case 2: case 3: case 4: case 8:
3345 break;
3346 default:
3347 IEMOP_RAISE_INVALID_OPCODE_RET();
3348 }
3349 IEMOP_HLP_DONE_DECODING();
3350
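 /* Only CR0 and CR4 loads can change the execution mode. For the valid set
    {0, 2, 3, 4, 8} the (2 | 8) mask is zero exactly for CR0 and CR4, so
    CR2/CR3/CR8 skip the IEM_CIMPL_F_MODE flag. */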
3351 if (iCrReg & (2 | 8))
3352 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3353 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3354 else
3355 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3356 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3357}
3358
3359
3360/** Opcode 0x0f 0x23. */
3361FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3362{
3363 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3364 IEMOP_HLP_MIN_386();
3365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3367 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3368 IEMOP_RAISE_INVALID_OPCODE_RET();
3369 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3370}
3371
3372
3373/** Opcode 0x0f 0x24. */
3374FNIEMOP_DEF(iemOp_mov_Rd_Td)
3375{
3376 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3377 IEMOP_HLP_MIN_386();
3378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3380 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3381 IEMOP_RAISE_INVALID_OPCODE_RET();
3382 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3383}
3384
3385
3386/** Opcode 0x0f 0x26. */
3387FNIEMOP_DEF(iemOp_mov_Td_Rd)
3388{
3389 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3390 IEMOP_HLP_MIN_386();
3391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3393 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3394 IEMOP_RAISE_INVALID_OPCODE_RET();
3395 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3396}
3397
3398
3399/**
3400 * @opcode 0x28
3401 * @oppfx none
3402 * @opcpuid sse
3403 * @opgroup og_sse_simdfp_datamove
3404 * @opxcpttype 1
3405 * @optest op1=1 op2=2 -> op1=2
3406 * @optest op1=0 op2=-42 -> op1=-42
3407 */
3408FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3409{
3410 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3412 if (IEM_IS_MODRM_REG_MODE(bRm))
3413 {
3414 /*
3415 * Register, register.
3416 */
3417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3418 IEM_MC_BEGIN(0, 0);
3419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3420 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3421 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3422 IEM_GET_MODRM_RM(pVCpu, bRm));
3423 IEM_MC_ADVANCE_RIP_AND_FINISH();
3424 IEM_MC_END();
3425 }
3426 else
3427 {
3428 /*
3429 * Register, memory.
3430 */
3431 IEM_MC_BEGIN(0, 2);
3432 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3434
3435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3439
3440 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3441 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3442
3443 IEM_MC_ADVANCE_RIP_AND_FINISH();
3444 IEM_MC_END();
3445 }
3446}
3447
3448/**
3449 * @opcode 0x28
3450 * @oppfx 66
3451 * @opcpuid sse2
3452 * @opgroup og_sse2_pcksclr_datamove
3453 * @opxcpttype 1
3454 * @optest op1=1 op2=2 -> op1=2
3455 * @optest op1=0 op2=-42 -> op1=-42
3456 */
3457FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3458{
3459 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3461 if (IEM_IS_MODRM_REG_MODE(bRm))
3462 {
3463 /*
3464 * Register, register.
3465 */
3466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3467 IEM_MC_BEGIN(0, 0);
3468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3470 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3471 IEM_GET_MODRM_RM(pVCpu, bRm));
3472 IEM_MC_ADVANCE_RIP_AND_FINISH();
3473 IEM_MC_END();
3474 }
3475 else
3476 {
3477 /*
3478 * Register, memory.
3479 */
3480 IEM_MC_BEGIN(0, 2);
3481 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3483
3484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3486 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3488
3489 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3490 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3491
3492 IEM_MC_ADVANCE_RIP_AND_FINISH();
3493 IEM_MC_END();
3494 }
3495}
3496
3497/* Opcode 0xf3 0x0f 0x28 - invalid */
3498/* Opcode 0xf2 0x0f 0x28 - invalid */
3499
3500/**
3501 * @opcode 0x29
3502 * @oppfx none
3503 * @opcpuid sse
3504 * @opgroup og_sse_simdfp_datamove
3505 * @opxcpttype 1
3506 * @optest op1=1 op2=2 -> op1=2
3507 * @optest op1=0 op2=-42 -> op1=-42
3508 */
3509FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3510{
3511 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3513 if (IEM_IS_MODRM_REG_MODE(bRm))
3514 {
3515 /*
3516 * Register, register.
3517 */
3518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3519 IEM_MC_BEGIN(0, 0);
3520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3522 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3523 IEM_GET_MODRM_REG(pVCpu, bRm));
3524 IEM_MC_ADVANCE_RIP_AND_FINISH();
3525 IEM_MC_END();
3526 }
3527 else
3528 {
3529 /*
3530 * Memory, register.
3531 */
3532 IEM_MC_BEGIN(0, 2);
3533 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3535
3536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3538 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3539 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3540
3541 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3542 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3543
3544 IEM_MC_ADVANCE_RIP_AND_FINISH();
3545 IEM_MC_END();
3546 }
3547}
3548
3549/**
3550 * @opcode 0x29
3551 * @oppfx 66
3552 * @opcpuid sse2
3553 * @opgroup og_sse2_pcksclr_datamove
3554 * @opxcpttype 1
3555 * @optest op1=1 op2=2 -> op1=2
3556 * @optest op1=0 op2=-42 -> op1=-42
3557 */
3558FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3559{
3560 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3562 if (IEM_IS_MODRM_REG_MODE(bRm))
3563 {
3564 /*
3565 * Register, register.
3566 */
3567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3568 IEM_MC_BEGIN(0, 0);
3569 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3570 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3571 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3572 IEM_GET_MODRM_REG(pVCpu, bRm));
3573 IEM_MC_ADVANCE_RIP_AND_FINISH();
3574 IEM_MC_END();
3575 }
3576 else
3577 {
3578 /*
3579 * Memory, register.
3580 */
3581 IEM_MC_BEGIN(0, 2);
3582 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3584
3585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3589
3590 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3591 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3592
3593 IEM_MC_ADVANCE_RIP_AND_FINISH();
3594 IEM_MC_END();
3595 }
3596}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3604{
3605 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo operand annotations (header says Qpi, macro uses Qq).
3606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3607 if (IEM_IS_MODRM_REG_MODE(bRm))
3608 {
3609 /*
3610 * XMM, MMX
3611 */
3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3613
3614 IEM_MC_BEGIN(3, 1);
3615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3616 IEM_MC_LOCAL(X86XMMREG, Dst);
3617 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3618 IEM_MC_ARG(uint64_t, u64Src, 2);
3619 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3620 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3621 IEM_MC_PREPARE_FPU_USAGE();
3622 IEM_MC_FPU_TO_MMX_MODE();
3623
3624 IEM_MC_REF_MXCSR(pfMxcsr);
3625 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3626 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3627
3628 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3629 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3630 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3631 } IEM_MC_ELSE() {
3632 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3633 } IEM_MC_ENDIF();
3634
3635 IEM_MC_ADVANCE_RIP_AND_FINISH();
3636 IEM_MC_END();
3637 }
3638 else
3639 {
3640 /*
3641 * XMM, [mem64]
3642 */
3643 IEM_MC_BEGIN(3, 2);
3644 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3645 IEM_MC_LOCAL(X86XMMREG, Dst);
3646 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3647 IEM_MC_ARG(uint64_t, u64Src, 2);
3648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3649
3650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3652 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3653 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3654 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3655
3656 IEM_MC_PREPARE_FPU_USAGE();
3657 IEM_MC_FPU_TO_MMX_MODE();
3658 IEM_MC_REF_MXCSR(pfMxcsr);
3659
3660 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3661 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3662 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3663 } IEM_MC_ELSE() {
3664 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3665 } IEM_MC_ENDIF();
3666
3667 IEM_MC_ADVANCE_RIP_AND_FINISH();
3668 IEM_MC_END();
3669 }
3670}
3671
3672
3673/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3674FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3675{
3676 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3678 if (IEM_IS_MODRM_REG_MODE(bRm))
3679 {
3680 /*
3681 * XMM, MMX
3682 */
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3684
3685 IEM_MC_BEGIN(3, 1);
3686 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3687 IEM_MC_LOCAL(X86XMMREG, Dst);
3688 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3689 IEM_MC_ARG(uint64_t, u64Src, 2);
3690 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3691 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3692 IEM_MC_PREPARE_FPU_USAGE();
3693 IEM_MC_FPU_TO_MMX_MODE();
3694
3695 IEM_MC_REF_MXCSR(pfMxcsr);
3696 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3697
3698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3699 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3701 } IEM_MC_ELSE() {
3702 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3703 } IEM_MC_ENDIF();
3704
3705 IEM_MC_ADVANCE_RIP_AND_FINISH();
3706 IEM_MC_END();
3707 }
3708 else
3709 {
3710 /*
3711 * XMM, [mem64]
3712 */
3713 IEM_MC_BEGIN(3, 2);
3714 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3715 IEM_MC_LOCAL(X86XMMREG, Dst);
3716 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3717 IEM_MC_ARG(uint64_t, u64Src, 2);
3718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3719
3720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3722 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3724 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3725
3726 /* Doesn't cause a transition to MMX mode. */
3727 IEM_MC_PREPARE_SSE_USAGE();
3728 IEM_MC_REF_MXCSR(pfMxcsr);
3729
3730 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3731 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3732 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3733 } IEM_MC_ELSE() {
3734 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3735 } IEM_MC_ENDIF();
3736
3737 IEM_MC_ADVANCE_RIP_AND_FINISH();
3738 IEM_MC_END();
3739 }
3740}
3741
3742
3743/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3744FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3745{
3746 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3747
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
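/* REX.W selects the width of the integer source operand (Ey): a 64-bit GPR or
   memory operand with REX.W, otherwise 32-bit. The destination is always the
   low single-precision slot of the XMM register. */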
3749 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3750 {
3751 if (IEM_IS_MODRM_REG_MODE(bRm))
3752 {
3753 /* XMM, greg64 */
3754 IEM_MC_BEGIN(3, 2);
3755 IEM_MC_LOCAL(uint32_t, fMxcsr);
3756 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3757 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3758 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3759 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3760
3761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3763 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3764
3765 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3766 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
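/* The worker hands the resulting MXCSR back in fMxcsr; fold it into the guest
   MXCSR before checking whether an unmasked exception is now pending. */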
3767 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3768 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3769 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3770 } IEM_MC_ELSE() {
3771 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3772 } IEM_MC_ENDIF();
3773
3774 IEM_MC_ADVANCE_RIP_AND_FINISH();
3775 IEM_MC_END();
3776 }
3777 else
3778 {
3779 /* XMM, [mem64] */
3780 IEM_MC_BEGIN(3, 4);
3781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3782 IEM_MC_LOCAL(uint32_t, fMxcsr);
3783 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3784 IEM_MC_LOCAL(int64_t, i64Src);
3785 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3786 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3787 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3788
3789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3791 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3792 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3793
3794 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3795 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3796 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3797 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3798 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3799 } IEM_MC_ELSE() {
3800 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3801 } IEM_MC_ENDIF();
3802
3803 IEM_MC_ADVANCE_RIP_AND_FINISH();
3804 IEM_MC_END();
3805 }
3806 }
3807 else
3808 {
3809 if (IEM_IS_MODRM_REG_MODE(bRm))
3810 {
3811 /* XMM, greg32 */
3812 IEM_MC_BEGIN(3, 2);
3813 IEM_MC_LOCAL(uint32_t, fMxcsr);
3814 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3815 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3816 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3817 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3818
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3820 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3821 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3822
3823 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3824 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3825 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3826 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3827 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3828 } IEM_MC_ELSE() {
3829 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3830 } IEM_MC_ENDIF();
3831
3832 IEM_MC_ADVANCE_RIP_AND_FINISH();
3833 IEM_MC_END();
3834 }
3835 else
3836 {
3837 /* XMM, [mem32] */
3838 IEM_MC_BEGIN(3, 4);
3839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3840 IEM_MC_LOCAL(uint32_t, fMxcsr);
3841 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3842 IEM_MC_LOCAL(int32_t, i32Src);
3843 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3844 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3845 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3846
3847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3849 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3850 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3851
3852 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3853 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3854 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3855 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3856 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3857 } IEM_MC_ELSE() {
3858 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3859 } IEM_MC_ENDIF();
3860
3861 IEM_MC_ADVANCE_RIP_AND_FINISH();
3862 IEM_MC_END();
3863 }
3864 }
3865}
3866
3867
3868/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3869FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3870{
3871 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3872
3873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3875 {
3876 if (IEM_IS_MODRM_REG_MODE(bRm))
3877 {
3878 /* XMM, greg64 */
3879 IEM_MC_BEGIN(3, 2);
3880 IEM_MC_LOCAL(uint32_t, fMxcsr);
3881 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3882 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3883 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3884 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3885
3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3888 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3889
3890 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3891 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3892 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3893 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3894 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3895 } IEM_MC_ELSE() {
3896 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3897 } IEM_MC_ENDIF();
3898
3899 IEM_MC_ADVANCE_RIP_AND_FINISH();
3900 IEM_MC_END();
3901 }
3902 else
3903 {
3904 /* XMM, [mem64] */
3905 IEM_MC_BEGIN(3, 4);
3906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3907 IEM_MC_LOCAL(uint32_t, fMxcsr);
3908 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3909 IEM_MC_LOCAL(int64_t, i64Src);
3910 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3911 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3912 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3913
3914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3916 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3917 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3918
3919 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3920 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3921 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3922 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3923 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3926 } IEM_MC_ENDIF();
3927
3928 IEM_MC_ADVANCE_RIP_AND_FINISH();
3929 IEM_MC_END();
3930 }
3931 }
3932 else
3933 {
3934 if (IEM_IS_MODRM_REG_MODE(bRm))
3935 {
3936 /* XMM, greg32 */
3937 IEM_MC_BEGIN(3, 2);
3938 IEM_MC_LOCAL(uint32_t, fMxcsr);
3939 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3940 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3941 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3942 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3943
3944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3945 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3946 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3947
3948 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3949 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3950 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3951 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3952 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3953 } IEM_MC_ELSE() {
3954 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3955 } IEM_MC_ENDIF();
3956
3957 IEM_MC_ADVANCE_RIP_AND_FINISH();
3958 IEM_MC_END();
3959 }
3960 else
3961 {
3962 /* XMM, [mem32] */
3963 IEM_MC_BEGIN(3, 4);
3964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3965 IEM_MC_LOCAL(uint32_t, fMxcsr);
3966 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3967 IEM_MC_LOCAL(int32_t, i32Src);
3968 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3969 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3970 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3971
3972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3974 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3975 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3976
3977 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3978 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3979 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3980 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3981 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3982 } IEM_MC_ELSE() {
3983 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3984 } IEM_MC_ENDIF();
3985
3986 IEM_MC_ADVANCE_RIP_AND_FINISH();
3987 IEM_MC_END();
3988 }
3989 }
3990}
3991
3992
3993/**
3994 * @opcode 0x2b
3995 * @opcodesub !11 mr/reg
3996 * @oppfx none
3997 * @opcpuid sse
3998 * @opgroup og_sse1_cachect
3999 * @opxcpttype 1
4000 * @optest op1=1 op2=2 -> op1=2
4001 * @optest op1=0 op2=-42 -> op1=-42
4002 */
4003FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4004{
4005 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
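/* Non-temporal store hint: real hardware may bypass the cache hierarchy, but
   under IEM this is simply an ordinary aligned 128-bit store. */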
4006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4007 if (IEM_IS_MODRM_MEM_MODE(bRm))
4008 {
4009 /*
4010 * Memory, register.
4011 */
4012 IEM_MC_BEGIN(0, 2);
4013 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015
4016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4020
4021 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4022 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4023
4024 IEM_MC_ADVANCE_RIP_AND_FINISH();
4025 IEM_MC_END();
4026 }
4027 /* The register, register encoding is invalid. */
4028 else
4029 IEMOP_RAISE_INVALID_OPCODE_RET();
4030}
4031
4032/**
4033 * @opcode 0x2b
4034 * @opcodesub !11 mr/reg
4035 * @oppfx 0x66
4036 * @opcpuid sse2
4037 * @opgroup og_sse2_cachect
4038 * @opxcpttype 1
4039 * @optest op1=1 op2=2 -> op1=2
4040 * @optest op1=0 op2=-42 -> op1=-42
4041 */
4042FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4043{
4044 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4046 if (IEM_IS_MODRM_MEM_MODE(bRm))
4047 {
4048 /*
4049 * Memory, register.
4050 */
4051 IEM_MC_BEGIN(0, 2);
4052 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4054
4055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4057 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4058 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4059
4060 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4061 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4062
4063 IEM_MC_ADVANCE_RIP_AND_FINISH();
4064 IEM_MC_END();
4065 }
4066 /* The register, register encoding is invalid. */
4067 else
4068 IEMOP_RAISE_INVALID_OPCODE_RET();
4069}
4070/* Opcode 0xf3 0x0f 0x2b - invalid */
4071/* Opcode 0xf2 0x0f 0x2b - invalid */
4072
4073
4074/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4075FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4076{
4077 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4079 if (IEM_IS_MODRM_REG_MODE(bRm))
4080 {
4081 /*
4082 * Register, register.
4083 */
4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4085
4086 IEM_MC_BEGIN(3, 1);
4087 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4088 IEM_MC_LOCAL(uint64_t, u64Dst);
4089 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4090 IEM_MC_ARG(uint64_t, u64Src, 2);
4091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4092 IEM_MC_PREPARE_FPU_USAGE();
4093 IEM_MC_FPU_TO_MMX_MODE();
4094
4095 IEM_MC_REF_MXCSR(pfMxcsr);
4096 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
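/* Both packed single-precision source values live in the low quadword of the XMM register. */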
4097
4098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4099 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4100 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4101 } IEM_MC_ELSE() {
4102 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4103 } IEM_MC_ENDIF();
4104
4105 IEM_MC_ADVANCE_RIP_AND_FINISH();
4106 IEM_MC_END();
4107 }
4108 else
4109 {
4110 /*
4111 * Register, memory.
4112 */
4113 IEM_MC_BEGIN(3, 2);
4114 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4115 IEM_MC_LOCAL(uint64_t, u64Dst);
4116 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4117 IEM_MC_ARG(uint64_t, u64Src, 2);
4118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4119
4120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4123 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4124
4125 IEM_MC_PREPARE_FPU_USAGE();
4126 IEM_MC_FPU_TO_MMX_MODE();
4127 IEM_MC_REF_MXCSR(pfMxcsr);
4128
4129 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4130 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4131 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4132 } IEM_MC_ELSE() {
4133 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4134 } IEM_MC_ENDIF();
4135
4136 IEM_MC_ADVANCE_RIP_AND_FINISH();
4137 IEM_MC_END();
4138 }
4139}
4140
4141
4142/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4143FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4144{
4145 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if (IEM_IS_MODRM_REG_MODE(bRm))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4153
4154 IEM_MC_BEGIN(3, 1);
4155 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4156 IEM_MC_LOCAL(uint64_t, u64Dst);
4157 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4158 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4159 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4160 IEM_MC_PREPARE_FPU_USAGE();
4161 IEM_MC_FPU_TO_MMX_MODE();
4162
4163 IEM_MC_REF_MXCSR(pfMxcsr);
4164 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4165
4166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4167 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4168 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4169 } IEM_MC_ELSE() {
4170 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4171 } IEM_MC_ENDIF();
4172
4173 IEM_MC_ADVANCE_RIP_AND_FINISH();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /*
4179 * Register, memory.
4180 */
4181 IEM_MC_BEGIN(3, 3);
4182 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4183 IEM_MC_LOCAL(uint64_t, u64Dst);
4184 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4185 IEM_MC_LOCAL(X86XMMREG, uSrc);
4186 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188
4189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4191 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
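/* Full 128-bit memory operand, so the _ALIGN_SSE fetch raises #GP(0) unless
   the effective address is 16-byte aligned. */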
4192 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193
4194 IEM_MC_PREPARE_FPU_USAGE();
4195 IEM_MC_FPU_TO_MMX_MODE();
4196
4197 IEM_MC_REF_MXCSR(pfMxcsr);
4198
4199 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4200 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4201 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4202 } IEM_MC_ELSE() {
4203 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4204 } IEM_MC_ENDIF();
4205
4206 IEM_MC_ADVANCE_RIP_AND_FINISH();
4207 IEM_MC_END();
4208 }
4209}
4210
4211
4212/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4213FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4214{
4215 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
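/* The truncating form: the conversion always rounds towards zero, ignoring the
   rounding control in MXCSR.RC (e.g. -1.7f becomes -1, not -2). */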
4216
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4219 {
4220 if (IEM_IS_MODRM_REG_MODE(bRm))
4221 {
4222 /* greg64, XMM */
4223 IEM_MC_BEGIN(3, 2);
4224 IEM_MC_LOCAL(uint32_t, fMxcsr);
4225 IEM_MC_LOCAL(int64_t, i64Dst);
4226 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4227 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4228 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4229
4230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4231 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4232 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4233
4234 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4235 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4236 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4237 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4238 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4239 } IEM_MC_ELSE() {
4240 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4241 } IEM_MC_ENDIF();
4242
4243 IEM_MC_ADVANCE_RIP_AND_FINISH();
4244 IEM_MC_END();
4245 }
4246 else
4247 {
4248 /* greg64, [mem32] */
4249 IEM_MC_BEGIN(3, 4);
4250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4251 IEM_MC_LOCAL(uint32_t, fMxcsr);
4252 IEM_MC_LOCAL(int64_t, i64Dst);
4253 IEM_MC_LOCAL(uint32_t, u32Src);
4254 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4255 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4256 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4257
4258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4260 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4261 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4262
4263 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4264 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4265 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4266 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4267 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4268 } IEM_MC_ELSE() {
4269 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4270 } IEM_MC_ENDIF();
4271
4272 IEM_MC_ADVANCE_RIP_AND_FINISH();
4273 IEM_MC_END();
4274 }
4275 }
4276 else
4277 {
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /* greg32, XMM */
4281 IEM_MC_BEGIN(3, 2);
4282 IEM_MC_LOCAL(uint32_t, fMxcsr);
4283 IEM_MC_LOCAL(int32_t, i32Dst);
4284 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4285 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4286 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4287
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4290 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4291
4292 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4293 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4294 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4295 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4296 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4297 } IEM_MC_ELSE() {
4298 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4299 } IEM_MC_ENDIF();
4300
4301 IEM_MC_ADVANCE_RIP_AND_FINISH();
4302 IEM_MC_END();
4303 }
4304 else
4305 {
4306 /* greg32, [mem32] */
4307 IEM_MC_BEGIN(3, 4);
4308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4309 IEM_MC_LOCAL(uint32_t, fMxcsr);
4310 IEM_MC_LOCAL(int32_t, i32Dst);
4311 IEM_MC_LOCAL(uint32_t, u32Src);
4312 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4313 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4314 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4315
4316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4320
4321 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4322 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4323 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4324 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4325 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4326 } IEM_MC_ELSE() {
4327 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4328 } IEM_MC_ENDIF();
4329
4330 IEM_MC_ADVANCE_RIP_AND_FINISH();
4331 IEM_MC_END();
4332 }
4333 }
4334}
4335
4336
4337/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4338FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4339{
4340 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4341
4342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4344 {
4345 if (IEM_IS_MODRM_REG_MODE(bRm))
4346 {
4347 /* greg64, XMM */
4348 IEM_MC_BEGIN(3, 2);
4349 IEM_MC_LOCAL(uint32_t, fMxcsr);
4350 IEM_MC_LOCAL(int64_t, i64Dst);
4351 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4352 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4353 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4354
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4356 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4357 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4358
4359 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4360 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4361 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4362 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4363 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4364 } IEM_MC_ELSE() {
4365 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4366 } IEM_MC_ENDIF();
4367
4368 IEM_MC_ADVANCE_RIP_AND_FINISH();
4369 IEM_MC_END();
4370 }
4371 else
4372 {
4373 /* greg64, [mem64] */
4374 IEM_MC_BEGIN(3, 4);
4375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4376 IEM_MC_LOCAL(uint32_t, fMxcsr);
4377 IEM_MC_LOCAL(int64_t, i64Dst);
4378 IEM_MC_LOCAL(uint64_t, u64Src);
4379 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4380 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4381 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4382
4383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4385 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4386 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4387
4388 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4389 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4390 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4391 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4392 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4393 } IEM_MC_ELSE() {
4394 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4395 } IEM_MC_ENDIF();
4396
4397 IEM_MC_ADVANCE_RIP_AND_FINISH();
4398 IEM_MC_END();
4399 }
4400 }
4401 else
4402 {
4403 if (IEM_IS_MODRM_REG_MODE(bRm))
4404 {
4405 /* greg32, XMM */
4406 IEM_MC_BEGIN(3, 2);
4407 IEM_MC_LOCAL(uint32_t, fMxcsr);
4408 IEM_MC_LOCAL(int32_t, i32Dst);
4409 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4410 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4411 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4412
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4414 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4415 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4416
4417 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4418 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4419 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4420 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4421 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4422 } IEM_MC_ELSE() {
4423 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4424 } IEM_MC_ENDIF();
4425
4426 IEM_MC_ADVANCE_RIP_AND_FINISH();
4427 IEM_MC_END();
4428 }
4429 else
4430 {
4431 /* greg32, [mem64] */
4432 IEM_MC_BEGIN(3, 4);
4433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4434 IEM_MC_LOCAL(uint32_t, fMxcsr);
4435 IEM_MC_LOCAL(int32_t, i32Dst);
4436 IEM_MC_LOCAL(uint64_t, u64Src);
4437 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4438 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4439 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4440
4441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4443 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4444 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4445
4446 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4447 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4448 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4449 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4450 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4451 } IEM_MC_ELSE() {
4452 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4453 } IEM_MC_ENDIF();
4454
4455 IEM_MC_ADVANCE_RIP_AND_FINISH();
4456 IEM_MC_END();
4457 }
4458 }
4459}
4460
4461
4462/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4463FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4464{
4465 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4467 if (IEM_IS_MODRM_REG_MODE(bRm))
4468 {
4469 /*
4470 * Register, register.
4471 */
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4473
4474 IEM_MC_BEGIN(3, 1);
4475 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4476 IEM_MC_LOCAL(uint64_t, u64Dst);
4477 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4478 IEM_MC_ARG(uint64_t, u64Src, 2);
4479
4480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4481 IEM_MC_PREPARE_FPU_USAGE();
4482 IEM_MC_FPU_TO_MMX_MODE();
4483
4484 IEM_MC_REF_MXCSR(pfMxcsr);
4485 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4486
4487 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4488 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4489 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4490 } IEM_MC_ELSE() {
4491 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4492 } IEM_MC_ENDIF();
4493
4494 IEM_MC_ADVANCE_RIP_AND_FINISH();
4495 IEM_MC_END();
4496 }
4497 else
4498 {
4499 /*
4500 * Register, memory.
4501 */
4502 IEM_MC_BEGIN(3, 2);
4503 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4504 IEM_MC_LOCAL(uint64_t, u64Dst);
4505 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4506 IEM_MC_ARG(uint64_t, u64Src, 2);
4507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4508
4509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4511 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4512 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4513
4514 IEM_MC_PREPARE_FPU_USAGE();
4515 IEM_MC_FPU_TO_MMX_MODE();
4516 IEM_MC_REF_MXCSR(pfMxcsr);
4517
4518 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4519 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4520 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4521 } IEM_MC_ELSE() {
4522 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4523 } IEM_MC_ENDIF();
4524
4525 IEM_MC_ADVANCE_RIP_AND_FINISH();
4526 IEM_MC_END();
4527 }
4528}
4529
4530
4531 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4532FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4533{
4534 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4535 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4536 if (IEM_IS_MODRM_REG_MODE(bRm))
4537 {
4538 /*
4539 * Register, register.
4540 */
4541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4542
4543 IEM_MC_BEGIN(3, 1);
4544 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4545 IEM_MC_LOCAL(uint64_t, u64Dst);
4546 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4547 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4548
4549 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4550 IEM_MC_PREPARE_FPU_USAGE();
4551 IEM_MC_FPU_TO_MMX_MODE();
4552
4553 IEM_MC_REF_MXCSR(pfMxcsr);
4554 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4555
4556 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4557 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4558 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4559 } IEM_MC_ELSE() {
4560 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4561 } IEM_MC_ENDIF();
4562
4563 IEM_MC_ADVANCE_RIP_AND_FINISH();
4564 IEM_MC_END();
4565 }
4566 else
4567 {
4568 /*
4569 * Register, memory.
4570 */
4571 IEM_MC_BEGIN(3, 3);
4572 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4573 IEM_MC_LOCAL(uint64_t, u64Dst);
4574 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4575 IEM_MC_LOCAL(X86XMMREG, uSrc);
4576 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4578
4579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4581 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4582 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4583
4584 IEM_MC_PREPARE_FPU_USAGE();
4585 IEM_MC_FPU_TO_MMX_MODE();
4586
4587 IEM_MC_REF_MXCSR(pfMxcsr);
4588
4589 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4590 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4591 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4592 } IEM_MC_ELSE() {
4593 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4594 } IEM_MC_ENDIF();
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 }
4599}
4600
4601
4602/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4603FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4604{
4605 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4606
4607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4608 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4609 {
4610 if (IEM_IS_MODRM_REG_MODE(bRm))
4611 {
4612 /* greg64, XMM */
4613 IEM_MC_BEGIN(3, 2);
4614 IEM_MC_LOCAL(uint32_t, fMxcsr);
4615 IEM_MC_LOCAL(int64_t, i64Dst);
4616 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4617 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4618 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4619
4620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4622 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4623
4624 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4625 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4626 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4627 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4628 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4629 } IEM_MC_ELSE() {
4630 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4631 } IEM_MC_ENDIF();
4632
4633 IEM_MC_ADVANCE_RIP_AND_FINISH();
4634 IEM_MC_END();
4635 }
4636 else
4637 {
4638 /* greg64, [mem32] */
4639 IEM_MC_BEGIN(3, 4);
4640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4641 IEM_MC_LOCAL(uint32_t, fMxcsr);
4642 IEM_MC_LOCAL(int64_t, i64Dst);
4643 IEM_MC_LOCAL(uint32_t, u32Src);
4644 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4645 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4646 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4651 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4652
4653 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4654 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4655 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4656 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4657 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4658 } IEM_MC_ELSE() {
4659 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4660 } IEM_MC_ENDIF();
4661
4662 IEM_MC_ADVANCE_RIP_AND_FINISH();
4663 IEM_MC_END();
4664 }
4665 }
4666 else
4667 {
4668 if (IEM_IS_MODRM_REG_MODE(bRm))
4669 {
4670 /* greg32, XMM */
4671 IEM_MC_BEGIN(3, 2);
4672 IEM_MC_LOCAL(uint32_t, fMxcsr);
4673 IEM_MC_LOCAL(int32_t, i32Dst);
4674 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4675 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4676 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4677
4678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4679 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4680 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4681
4682 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4683 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4684 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4685 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4686 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4687 } IEM_MC_ELSE() {
4688 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4689 } IEM_MC_ENDIF();
4690
4691 IEM_MC_ADVANCE_RIP_AND_FINISH();
4692 IEM_MC_END();
4693 }
4694 else
4695 {
4696 /* greg32, [mem32] */
4697 IEM_MC_BEGIN(3, 4);
4698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4699 IEM_MC_LOCAL(uint32_t, fMxcsr);
4700 IEM_MC_LOCAL(int32_t, i32Dst);
4701 IEM_MC_LOCAL(uint32_t, u32Src);
4702 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4703 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4704 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4705
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4708 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4709 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4710
4711 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4712 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4713 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4714 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4715 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4718 } IEM_MC_ENDIF();
4719
4720 IEM_MC_ADVANCE_RIP_AND_FINISH();
4721 IEM_MC_END();
4722 }
4723 }
4724}
4725
4726
4727/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4728FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4729{
4730 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4731
4732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4733 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4734 {
4735 if (IEM_IS_MODRM_REG_MODE(bRm))
4736 {
4737 /* greg64, XMM */
4738 IEM_MC_BEGIN(3, 2);
4739 IEM_MC_LOCAL(uint32_t, fMxcsr);
4740 IEM_MC_LOCAL(int64_t, i64Dst);
4741 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4742 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4743 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4744
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4746 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4747 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4748
4749 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4750 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4751 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4752 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4753 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4754 } IEM_MC_ELSE() {
4755 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4756 } IEM_MC_ENDIF();
4757
4758 IEM_MC_ADVANCE_RIP_AND_FINISH();
4759 IEM_MC_END();
4760 }
4761 else
4762 {
4763 /* greg64, [mem64] */
4764 IEM_MC_BEGIN(3, 4);
4765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4766 IEM_MC_LOCAL(uint32_t, fMxcsr);
4767 IEM_MC_LOCAL(int64_t, i64Dst);
4768 IEM_MC_LOCAL(uint64_t, u64Src);
4769 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4770 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4771 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4772
4773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4775 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4776 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4777
4778 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4779 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4780 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4781 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4782 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4783 } IEM_MC_ELSE() {
4784 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4785 } IEM_MC_ENDIF();
4786
4787 IEM_MC_ADVANCE_RIP_AND_FINISH();
4788 IEM_MC_END();
4789 }
4790 }
4791 else
4792 {
4793 if (IEM_IS_MODRM_REG_MODE(bRm))
4794 {
4795 /* greg32, XMM */
4796 IEM_MC_BEGIN(3, 2);
4797 IEM_MC_LOCAL(uint32_t, fMxcsr);
4798 IEM_MC_LOCAL(int32_t, i32Dst);
4799 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4800 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4801 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4802
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4804 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4805 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4806
4807 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4808 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4809 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4810 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4811 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4812 } IEM_MC_ELSE() {
4813 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4814 } IEM_MC_ENDIF();
4815
4816 IEM_MC_ADVANCE_RIP_AND_FINISH();
4817 IEM_MC_END();
4818 }
4819 else
4820 {
4821 /* greg32, [mem64] */
4822 IEM_MC_BEGIN(3, 4);
4823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4824 IEM_MC_LOCAL(uint32_t, fMxcsr);
4825 IEM_MC_LOCAL(int32_t, i32Dst);
4826 IEM_MC_LOCAL(uint64_t, u64Src);
4827 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4828 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4829 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4830
4831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4834 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4835
4836 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4837 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4838 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4839 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4840 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4841 } IEM_MC_ELSE() {
4842 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4843 } IEM_MC_ENDIF();
4844
4845 IEM_MC_ADVANCE_RIP_AND_FINISH();
4846 IEM_MC_END();
4847 }
4848 }
4849}
4850
4851
4852/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4853FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4854{
4855 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4857 if (IEM_IS_MODRM_REG_MODE(bRm))
4858 {
4859 /*
4860 * Register, register.
4861 */
4862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4863 IEM_MC_BEGIN(4, 1);
4864 IEM_MC_LOCAL(uint32_t, fEFlags);
4865 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4866 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4867 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4868 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4869 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4870 IEM_MC_PREPARE_SSE_USAGE();
4871 IEM_MC_FETCH_EFLAGS(fEFlags);
4872 IEM_MC_REF_MXCSR(pfMxcsr);
4873 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4874 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4875 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
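/* ZF/PF/CF receive the unordered-compare result (OF/SF/AF are cleared); the
   flags are only committed when no unmasked SIMD FP exception is pending. */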
4876 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4877 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4878 } IEM_MC_ELSE() {
4879 IEM_MC_COMMIT_EFLAGS(fEFlags);
4880 } IEM_MC_ENDIF();
4881
4882 IEM_MC_ADVANCE_RIP_AND_FINISH();
4883 IEM_MC_END();
4884 }
4885 else
4886 {
4887 /*
4888 * Register, memory.
4889 */
4890 IEM_MC_BEGIN(4, 3);
4891 IEM_MC_LOCAL(uint32_t, fEFlags);
4892 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4893 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4894 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4895 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4896 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4898
4899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4901 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4902 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4903
4904 IEM_MC_PREPARE_SSE_USAGE();
4905 IEM_MC_FETCH_EFLAGS(fEFlags);
4906 IEM_MC_REF_MXCSR(pfMxcsr);
4907 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4908 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4909 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4910 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4911 } IEM_MC_ELSE() {
4912 IEM_MC_COMMIT_EFLAGS(fEFlags);
4913 } IEM_MC_ENDIF();
4914
4915 IEM_MC_ADVANCE_RIP_AND_FINISH();
4916 IEM_MC_END();
4917 }
4918}
4919
4920
4921/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4922FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4923{
4924 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4926 if (IEM_IS_MODRM_REG_MODE(bRm))
4927 {
4928 /*
4929 * Register, register.
4930 */
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4932 IEM_MC_BEGIN(4, 1);
4933 IEM_MC_LOCAL(uint32_t, fEFlags);
4934 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4935 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4936 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4937 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4939 IEM_MC_PREPARE_SSE_USAGE();
4940 IEM_MC_FETCH_EFLAGS(fEFlags);
4941 IEM_MC_REF_MXCSR(pfMxcsr);
4942 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4943 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4944 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4945 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4946 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4947 } IEM_MC_ELSE() {
4948 IEM_MC_COMMIT_EFLAGS(fEFlags);
4949 } IEM_MC_ENDIF();
4950
4951 IEM_MC_ADVANCE_RIP_AND_FINISH();
4952 IEM_MC_END();
4953 }
4954 else
4955 {
4956 /*
4957 * Register, memory.
4958 */
4959 IEM_MC_BEGIN(4, 3);
4960 IEM_MC_LOCAL(uint32_t, fEFlags);
4961 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4962 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4963 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4964 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4965 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4967
4968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4970 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4971 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4972
4973 IEM_MC_PREPARE_SSE_USAGE();
4974 IEM_MC_FETCH_EFLAGS(fEFlags);
4975 IEM_MC_REF_MXCSR(pfMxcsr);
4976 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4977 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4978 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4979 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4980 } IEM_MC_ELSE() {
4981 IEM_MC_COMMIT_EFLAGS(fEFlags);
4982 } IEM_MC_ENDIF();
4983
4984 IEM_MC_ADVANCE_RIP_AND_FINISH();
4985 IEM_MC_END();
4986 }
4987}
4988
4989
4990/* Opcode 0xf3 0x0f 0x2e - invalid */
4991/* Opcode 0xf2 0x0f 0x2e - invalid */
4992
4993
4994/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4995FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4996{
4997 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4999 if (IEM_IS_MODRM_REG_MODE(bRm))
5000 {
5001 /*
5002 * Register, register.
5003 */
5004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5005 IEM_MC_BEGIN(4, 1);
5006 IEM_MC_LOCAL(uint32_t, fEFlags);
5007 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5008 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5009 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5010 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5012 IEM_MC_PREPARE_SSE_USAGE();
5013 IEM_MC_FETCH_EFLAGS(fEFlags);
5014 IEM_MC_REF_MXCSR(pfMxcsr);
5015 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5016 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5017 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5018 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5019 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5020 } IEM_MC_ELSE() {
5021 IEM_MC_COMMIT_EFLAGS(fEFlags);
5022 } IEM_MC_ENDIF();
5023
5024 IEM_MC_ADVANCE_RIP_AND_FINISH();
5025 IEM_MC_END();
5026 }
5027 else
5028 {
5029 /*
5030 * Register, memory.
5031 */
5032 IEM_MC_BEGIN(4, 3);
5033 IEM_MC_LOCAL(uint32_t, fEFlags);
5034 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5035 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5036 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5037 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5038 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5040
5041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5043 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5044 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5045
5046 IEM_MC_PREPARE_SSE_USAGE();
5047 IEM_MC_FETCH_EFLAGS(fEFlags);
5048 IEM_MC_REF_MXCSR(pfMxcsr);
5049 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5050 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5051 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5052 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5053 } IEM_MC_ELSE() {
5054 IEM_MC_COMMIT_EFLAGS(fEFlags);
5055 } IEM_MC_ENDIF();
5056
5057 IEM_MC_ADVANCE_RIP_AND_FINISH();
5058 IEM_MC_END();
5059 }
5060}
5061
5062
5063/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5064FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5065{
5066 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5068 if (IEM_IS_MODRM_REG_MODE(bRm))
5069 {
5070 /*
5071 * Register, register.
5072 */
5073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5074 IEM_MC_BEGIN(4, 1);
5075 IEM_MC_LOCAL(uint32_t, fEFlags);
5076 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5077 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5078 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5079 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5081 IEM_MC_PREPARE_SSE_USAGE();
5082 IEM_MC_FETCH_EFLAGS(fEFlags);
5083 IEM_MC_REF_MXCSR(pfMxcsr);
5084 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5085 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5086 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5087 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5088 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5089 } IEM_MC_ELSE() {
5090 IEM_MC_COMMIT_EFLAGS(fEFlags);
5091 } IEM_MC_ENDIF();
5092
5093 IEM_MC_ADVANCE_RIP_AND_FINISH();
5094 IEM_MC_END();
5095 }
5096 else
5097 {
5098 /*
5099 * Register, memory.
5100 */
5101 IEM_MC_BEGIN(4, 3);
5102 IEM_MC_LOCAL(uint32_t, fEFlags);
5103 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5104 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5105 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5106 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5107 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5109
5110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5112 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5113 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5114
5115 IEM_MC_PREPARE_SSE_USAGE();
5116 IEM_MC_FETCH_EFLAGS(fEFlags);
5117 IEM_MC_REF_MXCSR(pfMxcsr);
5118 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5119 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5120 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5121 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5122 } IEM_MC_ELSE() {
5123 IEM_MC_COMMIT_EFLAGS(fEFlags);
5124 } IEM_MC_ENDIF();
5125
5126 IEM_MC_ADVANCE_RIP_AND_FINISH();
5127 IEM_MC_END();
5128 }
5129}
5130
5131
5132/* Opcode 0xf3 0x0f 0x2f - invalid */
5133/* Opcode 0xf2 0x0f 0x2f - invalid */
5134
5135/** Opcode 0x0f 0x30. */
5136FNIEMOP_DEF(iemOp_wrmsr)
5137{
5138 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
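/* MSR writes are privileged and may be intercepted, so this is deferred to the
   C implementation; the IEM_CIMPL_F_VMEXIT clue hints that it may trigger a VM exit. */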
5140 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5141}
5142
5143
5144/** Opcode 0x0f 0x31. */
5145FNIEMOP_DEF(iemOp_rdtsc)
5146{
5147 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5150}
5151
5152
5153 /** Opcode 0x0f 0x32. */
5154FNIEMOP_DEF(iemOp_rdmsr)
5155{
5156 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5159}
5160
5161
5162 /** Opcode 0x0f 0x33. */
5163FNIEMOP_DEF(iemOp_rdpmc)
5164{
5165 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5167 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5168}
5169
5170
5171/** Opcode 0x0f 0x34. */
5172FNIEMOP_DEF(iemOp_sysenter)
5173{
5174 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5176 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5177 iemCImpl_sysenter);
5178}
5179
5180/** Opcode 0x0f 0x35. */
5181FNIEMOP_DEF(iemOp_sysexit)
5182{
5183 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
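/* The effective operand size is passed along: with REX.W this is the 64-bit
   flavour of SYSEXIT, returning to 64-bit user mode. */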
5185 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5186 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5187}
5188
5189/** Opcode 0x0f 0x37. */
5190FNIEMOP_STUB(iemOp_getsec);
5191
5192
5193/** Opcode 0x0f 0x38. */
5194FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5195{
5196#ifdef IEM_WITH_THREE_0F_38
5197 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
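/* Four table entries per opcode byte, one for each mandatory prefix state
   (none, 0x66, 0xf3, 0xf2) accumulated in idxPrefix. */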
5198 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5199#else
5200 IEMOP_BITCH_ABOUT_STUB();
5201 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5202#endif
5203}
5204
5205
5206/** Opcode 0x0f 0x3a. */
5207FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5208{
5209#ifdef IEM_WITH_THREE_0F_3A
5210 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5211 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5212#else
5213 IEMOP_BITCH_ABOUT_STUB();
5214 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5215#endif
5216}
5217
5218
5219/**
5220 * Implements a conditional move.
5221 *
5222 * Wish there was an obvious way to do this where we could share and reduce
5223 * code bloat.
5224 *
5225 * @param a_Cnd The conditional "microcode" operation.
5226 */
5227#define CMOV_X(a_Cnd) \
5228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5229 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5230 { \
5231 switch (pVCpu->iem.s.enmEffOpSize) \
5232 { \
5233 case IEMMODE_16BIT: \
5234 IEM_MC_BEGIN(0, 1); \
5235 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5236 a_Cnd { \
5237 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5238 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5239 } IEM_MC_ENDIF(); \
5240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5241 IEM_MC_END(); \
5242 break; \
5243 \
5244 case IEMMODE_32BIT: \
5245 IEM_MC_BEGIN(0, 1); \
5246 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5247 a_Cnd { \
5248 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5249 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5250 } IEM_MC_ELSE() { \
5251 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5252 } IEM_MC_ENDIF(); \
5253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5254 IEM_MC_END(); \
5255 break; \
5256 \
5257 case IEMMODE_64BIT: \
5258 IEM_MC_BEGIN(0, 1); \
5259 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5260 a_Cnd { \
5261 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5262 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5263 } IEM_MC_ENDIF(); \
5264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5265 IEM_MC_END(); \
5266 break; \
5267 \
5268 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5269 } \
5270 } \
5271 else \
5272 { \
5273 switch (pVCpu->iem.s.enmEffOpSize) \
5274 { \
5275 case IEMMODE_16BIT: \
5276 IEM_MC_BEGIN(0, 2); \
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5278 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5280 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5281 a_Cnd { \
5282 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5283 } IEM_MC_ENDIF(); \
5284 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5285 IEM_MC_END(); \
5286 break; \
5287 \
5288 case IEMMODE_32BIT: \
5289 IEM_MC_BEGIN(0, 2); \
5290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5291 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5293 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5294 a_Cnd { \
5295 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5296 } IEM_MC_ELSE() { \
5297 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5298 } IEM_MC_ENDIF(); \
5299 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5300 IEM_MC_END(); \
5301 break; \
5302 \
5303 case IEMMODE_64BIT: \
5304 IEM_MC_BEGIN(0, 2); \
5305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5306 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5308 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5309 a_Cnd { \
5310 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5311 } IEM_MC_ENDIF(); \
5312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5313 IEM_MC_END(); \
5314 break; \
5315 \
5316 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5317 } \
5318 } do {} while (0)
5319
5320
5321
5322/** Opcode 0x0f 0x40. */
5323FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5324{
5325 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5326 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5327}
5328
5329
5330/** Opcode 0x0f 0x41. */
5331FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5332{
5333 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5334 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5335}
5336
5337
5338/** Opcode 0x0f 0x42. */
5339FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5340{
5341 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5342 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5343}
5344
5345
5346/** Opcode 0x0f 0x43. */
5347FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5348{
5349 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5350 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5351}
5352
5353
5354/** Opcode 0x0f 0x44. */
5355FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5356{
5357 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5358 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5359}
5360
5361
5362/** Opcode 0x0f 0x45. */
5363FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5364{
5365 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5366 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5367}
5368
5369
5370/** Opcode 0x0f 0x46. */
5371FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5372{
5373 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5374 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5375}
5376
5377
5378/** Opcode 0x0f 0x47. */
5379FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5380{
5381 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5382 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5383}
5384
5385
5386/** Opcode 0x0f 0x48. */
5387FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5388{
5389 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5390 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5391}
5392
5393
5394/** Opcode 0x0f 0x49. */
5395FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5396{
5397 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5398 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5399}
5400
5401
5402/** Opcode 0x0f 0x4a. */
5403FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5404{
5405 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5406 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5407}
5408
5409
5410/** Opcode 0x0f 0x4b. */
5411FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5412{
5413 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5414 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5415}
5416
5417
5418/** Opcode 0x0f 0x4c. */
5419FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5420{
5421 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
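    /* Taken when SF != OF, i.e. signed less-than. */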
5422 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5423}
5424
5425
5426/** Opcode 0x0f 0x4d. */
5427FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5428{
5429 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5430 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5431}
5432
5433
5434/** Opcode 0x0f 0x4e. */
5435FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5436{
5437 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5438 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5439}
5440
5441
5442/** Opcode 0x0f 0x4f. */
5443FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5444{
5445 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5446 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5447}
5448
5449#undef CMOV_X
5450
5451/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5452FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5453{
5454 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
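    /* Gathers the sign bits of the four packed singles into bits 3:0 of the
       destination GPR, zeroing the remaining bits. */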
5455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5456 if (IEM_IS_MODRM_REG_MODE(bRm))
5457 {
5458 /*
5459 * Register, register.
5460 */
5461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5462 IEM_MC_BEGIN(2, 1);
5463 IEM_MC_LOCAL(uint8_t, u8Dst);
5464 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5465 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5466 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5467 IEM_MC_PREPARE_SSE_USAGE();
5468 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5469 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5470 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5471 IEM_MC_ADVANCE_RIP_AND_FINISH();
5472 IEM_MC_END();
5473 }
5474 /* No memory operand. */
5475 else
5476 IEMOP_RAISE_INVALID_OPCODE_RET();
5477}
5478
5479
5480/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5481FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5482{
5483 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5485 if (IEM_IS_MODRM_REG_MODE(bRm))
5486 {
5487 /*
5488 * Register, register.
5489 */
5490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5491 IEM_MC_BEGIN(2, 1);
5492 IEM_MC_LOCAL(uint8_t, u8Dst);
5493 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5494 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5496 IEM_MC_PREPARE_SSE_USAGE();
5497 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5498 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5499 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5500 IEM_MC_ADVANCE_RIP_AND_FINISH();
5501 IEM_MC_END();
5502 }
5503 /* No memory operand. */
5504 else
5505 IEMOP_RAISE_INVALID_OPCODE_RET();
5506
5507}
5508
5509
5510/* Opcode 0xf3 0x0f 0x50 - invalid */
5511/* Opcode 0xf2 0x0f 0x50 - invalid */
5512
5513
5514/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5515FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5516{
5517 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5518 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5519}
5520
5521
5522/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5523FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5524{
5525 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5526 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5527}
5528
5529
5530/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5531FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5532{
5533 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5534 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5535}
5536
5537
5538/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5539FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5540{
5541 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5542 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5543}
5544
5545
5546/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5547FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5548{
5549 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5550 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5551}
5552
5553
5554/* Opcode 0x66 0x0f 0x52 - invalid */
5555
5556
5557/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5558FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5559{
5560 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5562}
5563
5564
5565/* Opcode 0xf2 0x0f 0x52 - invalid */
5566
5567/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5568FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5569/* Opcode 0x66 0x0f 0x53 - invalid */
5570/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5571FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5572/* Opcode 0xf2 0x0f 0x53 - invalid */
5573
5574
5575/** Opcode 0x0f 0x54 - andps Vps, Wps */
5576FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5577{
5578 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5579 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5580}
5581
5582
5583/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5584FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5585{
5586 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5587 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5588}
5589
5590
5591/* Opcode 0xf3 0x0f 0x54 - invalid */
5592/* Opcode 0xf2 0x0f 0x54 - invalid */
5593
5594
5595/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5596FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5597{
5598 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5599 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5600}
5601
5602
5603/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5604FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5605{
5606 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5607 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5608}
5609
5610
5611/* Opcode 0xf3 0x0f 0x55 - invalid */
5612/* Opcode 0xf2 0x0f 0x55 - invalid */
5613
5614
5615/** Opcode 0x0f 0x56 - orps Vps, Wps */
5616FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5617{
5618 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5619 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5620}
5621
5622
5623/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5624FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5625{
5626 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5627 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5628}
5629
5630
5631/* Opcode 0xf3 0x0f 0x56 - invalid */
5632/* Opcode 0xf2 0x0f 0x56 - invalid */
5633
5634
5635/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5636FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5637{
5638 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5639 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5640}
5641
5642
5643/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5644FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5645{
5646 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5647 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5648}
5649
5650
5651/* Opcode 0xf3 0x0f 0x57 - invalid */
5652/* Opcode 0xf2 0x0f 0x57 - invalid */
5653
5654/** Opcode 0x0f 0x58 - addps Vps, Wps */
5655FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5656{
5657 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5658 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5659}
5660
5661
5662/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5663FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5664{
5665 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5666 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5667}
5668
5669
5670/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5671FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5672{
5673 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5674 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5675}
5676
5677
5678/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5679FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5680{
5681 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5682 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5683}
5684
5685
5686/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5687FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5688{
5689 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5690 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5691}
5692
5693
5694/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5695FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5696{
5697 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5698 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5699}
5700
5701
5702/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5703FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5704{
5705 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5706 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5707}
5708
5709
5710/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5711FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5712{
5713 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5714 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5715}
5716
5717
5718/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5719FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5720{
5721 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5722 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5723}
5724
5725
5726/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5727FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5728{
5729 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5730 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5731}
5732
5733
5734/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5735FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5736{
5737 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5738 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5739}
5740
5741
5742/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5743FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5744{
5745 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5746 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5747}
5748
5749
5750/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5751FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5752{
5753 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5754 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5755}
5756
5757
5758/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5759FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5760{
5761 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5762 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5763}
5764
5765
5766/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5767FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5768{
5769 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5770 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5771}
5772
5773
5774/* Opcode 0xf2 0x0f 0x5b - invalid */
5775
5776
5777/** Opcode 0x0f 0x5c - subps Vps, Wps */
5778FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5779{
5780 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5781 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5782}
5783
5784
5785/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5786FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5787{
5788 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5789 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5790}
5791
5792
5793/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5794FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5795{
5796 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5797 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5798}
5799
5800
5801/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5802FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5803{
5804 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5805 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5806}
5807
5808
5809/** Opcode 0x0f 0x5d - minps Vps, Wps */
5810FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5811{
5812 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5813 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5814}
5815
5816
5817/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5818FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5819{
5820 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5821 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5822}
5823
5824
5825/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5826FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5827{
5828 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5829 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5830}
5831
5832
5833/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5834FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5835{
5836 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5837 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5838}
5839
5840
5841/** Opcode 0x0f 0x5e - divps Vps, Wps */
5842FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5843{
5844 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5845 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5846}
5847
5848
5849/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5850FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5851{
5852 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5853 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5854}
5855
5856
5857/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5858FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5859{
5860 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5861 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5862}
5863
5864
5865/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5866FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5867{
5868 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5869 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5870}
5871
5872
5873/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5874FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5875{
5876 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5877 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5878}
5879
5880
5881/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5882FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5883{
5884 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5885 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5886}
5887
5888
5889/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5890FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5891{
5892 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5893 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5894}
5895
5896
5897/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5898FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5899{
5900 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5901 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5902}
5903
5904
5905/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5906FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5907{
5908 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5909 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5910}
5911
5912
5913/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5914FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5915{
5916 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5917 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5918}
5919
5920
5921/* Opcode 0xf3 0x0f 0x60 - invalid */
5922
5923
5924/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5925FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5926{
5927 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5928 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5929 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5930}
5931
5932
5933/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5934FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5935{
5936 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5937 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5938}
5939
5940
5941/* Opcode 0xf3 0x0f 0x61 - invalid */
5942
5943
5944/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5945FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5946{
5947 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5948 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5949}
5950
5951
5952/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5953FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5954{
5955 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5956 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5957}
5958
5959
5960/* Opcode 0xf3 0x0f 0x62 - invalid */
5961
5962
5963
5964/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5965FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5966{
5967 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5968 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5969}
5970
5971
5972/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5973FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5974{
5975 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5976 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5977}
5978
5979
5980/* Opcode 0xf3 0x0f 0x63 - invalid */
5981
5982
5983/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5984FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5985{
5986 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5987 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5988}
5989
5990
5991/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5992FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5993{
5994 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5995 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5996}
5997
5998
5999/* Opcode 0xf3 0x0f 0x64 - invalid */
6000
6001
6002/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6003FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6004{
6005 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6006 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6007}
6008
6009
6010/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6011FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6012{
6013 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6014 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6015}
6016
6017
6018/* Opcode 0xf3 0x0f 0x65 - invalid */
6019
6020
6021/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6022FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6023{
6024 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6025 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6026}
6027
6028
6029/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6030FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6031{
6032 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6033 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6034}
6035
6036
6037/* Opcode 0xf3 0x0f 0x66 - invalid */
6038
6039
6040/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6041FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6042{
6043 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6044 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6045}
6046
6047
6048/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6049FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6050{
6051 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6052 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6053}
6054
6055
6056/* Opcode 0xf3 0x0f 0x67 - invalid */
6057
6058
6059/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6060 * @note Intel and AMD both use Qd for the second parameter; however, they
6061 * both list it as an mmX/mem64 operand and Intel describes it as being
6062 * loaded as a qword, so it should be Qq, shouldn't it? */
6063FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6064{
6065 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6066 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6067}
6068
6069
6070/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6071FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6072{
6073 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6074 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6075}
6076
6077
6078/* Opcode 0xf3 0x0f 0x68 - invalid */
6079
6080
6081/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6082 * @note Intel and AMD both use Qd for the second parameter; however, they
6083 * both list it as an mmX/mem64 operand and Intel describes it as being
6084 * loaded as a qword, so it should be Qq, shouldn't it? */
6085FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6086{
6087 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6088 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6089}
6090
6091
6092/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6093FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6094{
6095 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6096 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6097
6098}
6099
6100
6101/* Opcode 0xf3 0x0f 0x69 - invalid */
6102
6103
6104/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6105 * @note Intel and AMD both use Qd for the second parameter; however, they
6106 * both list it as an mmX/mem64 operand and Intel describes it as being
6107 * loaded as a qword, so it should be Qq, shouldn't it? */
6108FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6109{
6110 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6111 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6112}
6113
6114
6115/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6116FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6117{
6118 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6119 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6120}
6121
6122
6123/* Opcode 0xf3 0x0f 0x6a - invalid */
6124
6125
6126/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6127FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6128{
6129 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6130 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6131}
6132
6133
6134/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6135FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6136{
6137 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6138 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6139}
6140
6141
6142/* Opcode 0xf3 0x0f 0x6b - invalid */
6143
6144
6145/* Opcode 0x0f 0x6c - invalid */
6146
6147
6148/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6149FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6150{
6151 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6152 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6153}
6154
6155
6156/* Opcode 0xf3 0x0f 0x6c - invalid */
6157/* Opcode 0xf2 0x0f 0x6c - invalid */
6158
6159
6160/* Opcode 0x0f 0x6d - invalid */
6161
6162
6163/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6164FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6165{
6166 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6167 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6168}
6169
6170
6171/* Opcode 0xf3 0x0f 0x6d - invalid */
6172
6173
6174FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6175{
6176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
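    /* REX.W selects the 64-bit movq form; without it this is the dword movd. */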
6177 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6178 {
6179 /**
6180 * @opcode 0x6e
6181 * @opcodesub rex.w=1
6182 * @oppfx none
6183 * @opcpuid mmx
6184 * @opgroup og_mmx_datamove
6185 * @opxcpttype 5
6186 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6187 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6188 */
6189 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6190 if (IEM_IS_MODRM_REG_MODE(bRm))
6191 {
6192 /* MMX, greg64 */
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6194 IEM_MC_BEGIN(0, 1);
6195 IEM_MC_LOCAL(uint64_t, u64Tmp);
6196
6197 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6198 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6199 IEM_MC_FPU_TO_MMX_MODE();
6200
6201 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6202 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6203
6204 IEM_MC_ADVANCE_RIP_AND_FINISH();
6205 IEM_MC_END();
6206 }
6207 else
6208 {
6209 /* MMX, [mem64] */
6210 IEM_MC_BEGIN(0, 2);
6211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6212 IEM_MC_LOCAL(uint64_t, u64Tmp);
6213
6214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6216 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6217 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6218 IEM_MC_FPU_TO_MMX_MODE();
6219
6220 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6221 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6222
6223 IEM_MC_ADVANCE_RIP_AND_FINISH();
6224 IEM_MC_END();
6225 }
6226 }
6227 else
6228 {
6229 /**
6230 * @opdone
6231 * @opcode 0x6e
6232 * @opcodesub rex.w=0
6233 * @oppfx none
6234 * @opcpuid mmx
6235 * @opgroup og_mmx_datamove
6236 * @opxcpttype 5
6237 * @opfunction iemOp_movd_q_Pd_Ey
6238 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6239 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6240 */
6241 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6242 if (IEM_IS_MODRM_REG_MODE(bRm))
6243 {
6244 /* MMX, greg32 */
6245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6246 IEM_MC_BEGIN(0, 1);
6247 IEM_MC_LOCAL(uint32_t, u32Tmp);
6248
6249 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6250 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6251 IEM_MC_FPU_TO_MMX_MODE();
6252
6253 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6254 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6255
6256 IEM_MC_ADVANCE_RIP_AND_FINISH();
6257 IEM_MC_END();
6258 }
6259 else
6260 {
6261 /* MMX, [mem32] */
6262 IEM_MC_BEGIN(0, 2);
6263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6264 IEM_MC_LOCAL(uint32_t, u32Tmp);
6265
6266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6268 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6269 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6270 IEM_MC_FPU_TO_MMX_MODE();
6271
6272 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6273 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6274
6275 IEM_MC_ADVANCE_RIP_AND_FINISH();
6276 IEM_MC_END();
6277 }
6278 }
6279}
6280
6281FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6282{
6283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6284 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6285 {
6286 /**
6287 * @opcode 0x6e
6288 * @opcodesub rex.w=1
6289 * @oppfx 0x66
6290 * @opcpuid sse2
6291 * @opgroup og_sse2_simdint_datamove
6292 * @opxcpttype 5
6293 * @optest 64-bit / op1=1 op2=2 -> op1=2
6294 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6295 */
6296 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6297 if (IEM_IS_MODRM_REG_MODE(bRm))
6298 {
6299 /* XMM, greg64 */
6300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6301 IEM_MC_BEGIN(0, 1);
6302 IEM_MC_LOCAL(uint64_t, u64Tmp);
6303
6304 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6305 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6306
6307 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6308 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6309
6310 IEM_MC_ADVANCE_RIP_AND_FINISH();
6311 IEM_MC_END();
6312 }
6313 else
6314 {
6315 /* XMM, [mem64] */
6316 IEM_MC_BEGIN(0, 2);
6317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6318 IEM_MC_LOCAL(uint64_t, u64Tmp);
6319
6320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6323 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6324
6325 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6326 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6327
6328 IEM_MC_ADVANCE_RIP_AND_FINISH();
6329 IEM_MC_END();
6330 }
6331 }
6332 else
6333 {
6334 /**
6335 * @opdone
6336 * @opcode 0x6e
6337 * @opcodesub rex.w=0
6338 * @oppfx 0x66
6339 * @opcpuid sse2
6340 * @opgroup og_sse2_simdint_datamove
6341 * @opxcpttype 5
6342 * @opfunction iemOp_movd_q_Vy_Ey
6343 * @optest op1=1 op2=2 -> op1=2
6344 * @optest op1=0 op2=-42 -> op1=-42
6345 */
6346 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6347 if (IEM_IS_MODRM_REG_MODE(bRm))
6348 {
6349 /* XMM, greg32 */
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6351 IEM_MC_BEGIN(0, 1);
6352 IEM_MC_LOCAL(uint32_t, u32Tmp);
6353
6354 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6355 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6356
6357 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6358 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6359
6360 IEM_MC_ADVANCE_RIP_AND_FINISH();
6361 IEM_MC_END();
6362 }
6363 else
6364 {
6365 /* XMM, [mem32] */
6366 IEM_MC_BEGIN(0, 2);
6367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6368 IEM_MC_LOCAL(uint32_t, u32Tmp);
6369
6370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6374
6375 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6376 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6377
6378 IEM_MC_ADVANCE_RIP_AND_FINISH();
6379 IEM_MC_END();
6380 }
6381 }
6382}
6383
6384/* Opcode 0xf3 0x0f 0x6e - invalid */
6385
6386
6387/**
6388 * @opcode 0x6f
6389 * @oppfx none
6390 * @opcpuid mmx
6391 * @opgroup og_mmx_datamove
6392 * @opxcpttype 5
6393 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6394 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6395 */
6396FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6397{
6398 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6400 if (IEM_IS_MODRM_REG_MODE(bRm))
6401 {
6402 /*
6403 * Register, register.
6404 */
6405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6406 IEM_MC_BEGIN(0, 1);
6407 IEM_MC_LOCAL(uint64_t, u64Tmp);
6408
6409 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6410 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6411 IEM_MC_FPU_TO_MMX_MODE();
6412
6413 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6414 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6415
6416 IEM_MC_ADVANCE_RIP_AND_FINISH();
6417 IEM_MC_END();
6418 }
6419 else
6420 {
6421 /*
6422 * Register, memory.
6423 */
6424 IEM_MC_BEGIN(0, 2);
6425 IEM_MC_LOCAL(uint64_t, u64Tmp);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6427
6428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6430 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6431 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6432 IEM_MC_FPU_TO_MMX_MODE();
6433
6434 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6435 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6436
6437 IEM_MC_ADVANCE_RIP_AND_FINISH();
6438 IEM_MC_END();
6439 }
6440}
6441
6442/**
6443 * @opcode 0x6f
6444 * @oppfx 0x66
6445 * @opcpuid sse2
6446 * @opgroup og_sse2_simdint_datamove
6447 * @opxcpttype 1
6448 * @optest op1=1 op2=2 -> op1=2
6449 * @optest op1=0 op2=-42 -> op1=-42
6450 */
6451FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6452{
6453 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6455 if (IEM_IS_MODRM_REG_MODE(bRm))
6456 {
6457 /*
6458 * Register, register.
6459 */
6460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6461 IEM_MC_BEGIN(0, 0);
6462
6463 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6465
6466 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6467 IEM_GET_MODRM_RM(pVCpu, bRm));
6468 IEM_MC_ADVANCE_RIP_AND_FINISH();
6469 IEM_MC_END();
6470 }
6471 else
6472 {
6473 /*
6474 * Register, memory.
6475 */
6476 IEM_MC_BEGIN(0, 2);
6477 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6479
6480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6483 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6484
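        /* movdqa requires a 16-byte aligned operand; the aligned fetch raises
           #GP(0) on a misaligned effective address. */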
6485 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6486 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6487
6488 IEM_MC_ADVANCE_RIP_AND_FINISH();
6489 IEM_MC_END();
6490 }
6491}
6492
6493/**
6494 * @opcode 0x6f
6495 * @oppfx 0xf3
6496 * @opcpuid sse2
6497 * @opgroup og_sse2_simdint_datamove
6498 * @opxcpttype 4UA
6499 * @optest op1=1 op2=2 -> op1=2
6500 * @optest op1=0 op2=-42 -> op1=-42
6501 */
6502FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6503{
6504 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6506 if (IEM_IS_MODRM_REG_MODE(bRm))
6507 {
6508 /*
6509 * Register, register.
6510 */
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6512 IEM_MC_BEGIN(0, 0);
6513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6515 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6516 IEM_GET_MODRM_RM(pVCpu, bRm));
6517 IEM_MC_ADVANCE_RIP_AND_FINISH();
6518 IEM_MC_END();
6519 }
6520 else
6521 {
6522 /*
6523 * Register, memory.
6524 */
6525 IEM_MC_BEGIN(0, 2);
6526 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6528
6529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6531 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6532 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
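        /* movdqu: plain fetch, no alignment restriction on the memory operand. */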
6533 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6534 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6535
6536 IEM_MC_ADVANCE_RIP_AND_FINISH();
6537 IEM_MC_END();
6538 }
6539}
6540
6541
6542/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6543FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6544{
6545 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6547 if (IEM_IS_MODRM_REG_MODE(bRm))
6548 {
6549 /*
6550 * Register, register.
6551 */
6552 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6554
6555 IEM_MC_BEGIN(3, 0);
6556 IEM_MC_ARG(uint64_t *, pDst, 0);
6557 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6558 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6560 IEM_MC_PREPARE_FPU_USAGE();
6561 IEM_MC_FPU_TO_MMX_MODE();
6562
6563 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6564 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6565 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6566 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6567
6568 IEM_MC_ADVANCE_RIP_AND_FINISH();
6569 IEM_MC_END();
6570 }
6571 else
6572 {
6573 /*
6574 * Register, memory.
6575 */
6576 IEM_MC_BEGIN(3, 2);
6577 IEM_MC_ARG(uint64_t *, pDst, 0);
6578 IEM_MC_LOCAL(uint64_t, uSrc);
6579 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6581
6582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6583 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6584 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6586 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6587 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6588
6589 IEM_MC_PREPARE_FPU_USAGE();
6590 IEM_MC_FPU_TO_MMX_MODE();
6591
6592 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6593 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6594 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6595
6596 IEM_MC_ADVANCE_RIP_AND_FINISH();
6597 IEM_MC_END();
6598 }
6599}
6600
6601
6602/**
6603 * Common worker for SSE2 instructions on the forms:
6604 * pshufd xmm1, xmm2/mem128, imm8
6605 * pshufhw xmm1, xmm2/mem128, imm8
6606 * pshuflw xmm1, xmm2/mem128, imm8
6607 *
6608 * Proper alignment of the 128-bit operand is enforced.
6609 * Exceptions type 4. SSE2 cpuid checks.
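 *
 * For example, pshufd xmm1, xmm2, 0x1B selects source dwords 3,2,1,0 and so
 * reverses the four dwords of the source into the destination.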
6610 */
6611FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6612{
6613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6614 if (IEM_IS_MODRM_REG_MODE(bRm))
6615 {
6616 /*
6617 * Register, register.
6618 */
6619 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6621
6622 IEM_MC_BEGIN(3, 0);
6623 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6624 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6625 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6626 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6627 IEM_MC_PREPARE_SSE_USAGE();
6628 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6629 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6630 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6631 IEM_MC_ADVANCE_RIP_AND_FINISH();
6632 IEM_MC_END();
6633 }
6634 else
6635 {
6636 /*
6637 * Register, memory.
6638 */
6639 IEM_MC_BEGIN(3, 2);
6640 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6641 IEM_MC_LOCAL(RTUINT128U, uSrc);
6642 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6644
6645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6646 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6647 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6649 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6650
6651 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6652 IEM_MC_PREPARE_SSE_USAGE();
6653 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6654 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6655
6656 IEM_MC_ADVANCE_RIP_AND_FINISH();
6657 IEM_MC_END();
6658 }
6659}
6660
6661
6662/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6663FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6664{
6665 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6666 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6667}
6668
6669
6670/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6671FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6672{
6673 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6674 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6675}
6676
6677
6678/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6679FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6680{
6681 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6682 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6683}
6684
6685
6686/**
6687 * Common worker for MMX instructions of the form:
6688 * psrlw mm, imm8
6689 * psraw mm, imm8
6690 * psllw mm, imm8
6691 * psrld mm, imm8
6692 * psrad mm, imm8
6693 * pslld mm, imm8
6694 * psrlq mm, imm8
6695 * psllq mm, imm8
6696 *
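 * The immediate byte is the shift count. Logical shifts with a count of the
 * element width or more zero the elements, while the arithmetic psraw/psrad
 * clamp the count and fill with sign bits.
 *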
6697 */
6698FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6699{
6700 if (IEM_IS_MODRM_REG_MODE(bRm))
6701 {
6702 /*
6703 * Register, immediate.
6704 */
6705 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6707
6708 IEM_MC_BEGIN(2, 0);
6709 IEM_MC_ARG(uint64_t *, pDst, 0);
6710 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6712 IEM_MC_PREPARE_FPU_USAGE();
6713 IEM_MC_FPU_TO_MMX_MODE();
6714
6715 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6716 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6717 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6718
6719 IEM_MC_ADVANCE_RIP_AND_FINISH();
6720 IEM_MC_END();
6721 }
6722 else
6723 {
6724 /*
6725 * Register, memory not supported.
6726 */
6727 /// @todo Caller already enforced register mode?!
6728 AssertFailedReturn(VINF_SUCCESS);
6729 }
6730}
6731
6732
6733/**
6734 * Common worker for SSE2 instructions of the form:
6735 * psrlw xmm, imm8
6736 * psraw xmm, imm8
6737 * psllw xmm, imm8
6738 * psrld xmm, imm8
6739 * psrad xmm, imm8
6740 * pslld xmm, imm8
6741 * psrlq xmm, imm8
6742 * psllq xmm, imm8
6743 *
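 * Also used for the group 14 psrldq/pslldq forms, which shift the entire
 * xmm register by bytes rather than shifting the packed elements.
 *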
6744 */
6745FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6746{
6747 if (IEM_IS_MODRM_REG_MODE(bRm))
6748 {
6749 /*
6750 * Register, immediate.
6751 */
6752 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6754
6755 IEM_MC_BEGIN(2, 0);
6756 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6757 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6759 IEM_MC_PREPARE_SSE_USAGE();
6760 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6761 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6762 IEM_MC_ADVANCE_RIP_AND_FINISH();
6763 IEM_MC_END();
6764 }
6765 else
6766 {
6767 /*
6768 * Register, memory not supported.
6769 */
6770 /// @todo Caller already enforced register mode?!
6771 AssertFailedReturn(VINF_SUCCESS);
6772 }
6773}
6774
6775
6776/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6777FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6778{
6779// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6780 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6781}
6782
6783
6784/** Opcode 0x66 0x0f 0x71 11/2. */
6785FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6786{
6787// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6788 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6789}
6790
6791
6792/** Opcode 0x0f 0x71 11/4. */
6793FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6794{
6795// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6796 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6797}
6798
6799
6800/** Opcode 0x66 0x0f 0x71 11/4. */
6801FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6802{
6803// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6804 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6805}
6806
6807
6808/** Opcode 0x0f 0x71 11/6. */
6809FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6810{
6811// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6812 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6813}
6814
6815
6816/** Opcode 0x66 0x0f 0x71 11/6. */
6817FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6818{
6819// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6820 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6821}
6822
6823
6824/**
6825 * Group 12 jump table for register variant.
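 * Indexed by the ModR/M reg field times four plus the mandatory-prefix
 * index, mirroring the dispatch in iemOp_Grp12 below; groups 13 and 14
 * use the same layout.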
6826 */
6827IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6828{
6829 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6830 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6831 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6832 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6833 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6834 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6835 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6836 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6837};
6838AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6839
6840
6841/** Opcode 0x0f 0x71. */
6842FNIEMOP_DEF(iemOp_Grp12)
6843{
6844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6845 if (IEM_IS_MODRM_REG_MODE(bRm))
6846 /* register, register */
6847 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6848 + pVCpu->iem.s.idxPrefix], bRm);
6849 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6850}
6851
6852
6853/** Opcode 0x0f 0x72 11/2. */
6854FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6855{
6856// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6857 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6858}
6859
6860
6861/** Opcode 0x66 0x0f 0x72 11/2. */
6862FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6863{
6864// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6865 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6866}
6867
6868
6869/** Opcode 0x0f 0x72 11/4. */
6870FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6871{
6872// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6873 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6874}
6875
6876
6877/** Opcode 0x66 0x0f 0x72 11/4. */
6878FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6879{
6880// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6881 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6882}
6883
6884
6885/** Opcode 0x0f 0x72 11/6. */
6886FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6887{
6888// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6889 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6890}
6891
6892/** Opcode 0x66 0x0f 0x72 11/6. */
6893FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6894{
6895// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6896 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6897}
6898
6899
6900/**
6901 * Group 13 jump table for register variant.
6902 */
6903IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6904{
6905 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6906 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6907 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6908 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6909 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6910 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6911 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6912 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6913};
6914AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6915
6916/** Opcode 0x0f 0x72. */
6917FNIEMOP_DEF(iemOp_Grp13)
6918{
6919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6920 if (IEM_IS_MODRM_REG_MODE(bRm))
6921 /* register, register */
6922 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6923 + pVCpu->iem.s.idxPrefix], bRm);
6924 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6925}
6926
6927
6928/** Opcode 0x0f 0x73 11/2. */
6929FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6930{
6931// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6932 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6933}
6934
6935
6936/** Opcode 0x66 0x0f 0x73 11/2. */
6937FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6938{
6939// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6940 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6941}
6942
6943
6944/** Opcode 0x66 0x0f 0x73 11/3. */
6945FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6946{
6947// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6948 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6949}
6950
6951
6952/** Opcode 0x0f 0x73 11/6. */
6953FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6954{
6955// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6956 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6957}
6958
6959
6960/** Opcode 0x66 0x0f 0x73 11/6. */
6961FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6962{
6963// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6964 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6965}
6966
6967
6968/** Opcode 0x66 0x0f 0x73 11/7. */
6969FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6970{
6971// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6972 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6973}
6974
6975/**
6976 * Group 14 jump table for register variant.
6977 */
6978IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6979{
6980 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6981 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6982 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6983 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6984 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6985 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6986 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6987 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6988};
6989AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6990
6991
6992/** Opcode 0x0f 0x73. */
6993FNIEMOP_DEF(iemOp_Grp14)
6994{
6995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6996 if (IEM_IS_MODRM_REG_MODE(bRm))
6997 /* register, register */
6998 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6999 + pVCpu->iem.s.idxPrefix], bRm);
7000 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7001}
7002
7003
7004/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7005FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7006{
7007 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7008 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7009}
7010
7011
7012/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7013FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7014{
7015 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7016 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7017}
7018
7019
7020/* Opcode 0xf3 0x0f 0x74 - invalid */
7021/* Opcode 0xf2 0x0f 0x74 - invalid */
7022
7023
7024/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7025FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7026{
7027 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7028 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7029}
7030
7031
7032/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7033FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7034{
7035 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7036 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7037}
7038
7039
7040/* Opcode 0xf3 0x0f 0x75 - invalid */
7041/* Opcode 0xf2 0x0f 0x75 - invalid */
7042
7043
7044/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7045FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7046{
7047 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7048 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7049}
7050
7051
7052/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7053FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7054{
7055 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7056 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7057}
7058
7059
7060/* Opcode 0xf3 0x0f 0x76 - invalid */
7061/* Opcode 0xf2 0x0f 0x76 - invalid */
7062
7063
7064/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7065FNIEMOP_DEF(iemOp_emms)
7066{
7067 IEMOP_MNEMONIC(emms, "emms");
7068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7069
7070 IEM_MC_BEGIN(0, 0);
7071 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7072 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7073 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7074 IEM_MC_FPU_FROM_MMX_MODE();
7075 IEM_MC_ADVANCE_RIP_AND_FINISH();
7076 IEM_MC_END();
7077}
7078
7079/* Opcode 0x66 0x0f 0x77 - invalid */
7080/* Opcode 0xf3 0x0f 0x77 - invalid */
7081/* Opcode 0xf2 0x0f 0x77 - invalid */
7082
7083/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7084#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7085FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7086{
7087 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7088 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7089 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7090 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7091
7092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7093 if (IEM_IS_MODRM_REG_MODE(bRm))
7094 {
7095 /*
7096 * Register, register.
7097 */
7098 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7099 if (enmEffOpSize == IEMMODE_64BIT)
7100 {
7101 IEM_MC_BEGIN(2, 0);
7102 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7103 IEM_MC_ARG(uint64_t, u64Enc, 1);
7104 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7105 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7106 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7107 IEM_MC_END();
7108 }
7109 else
7110 {
7111 IEM_MC_BEGIN(2, 0);
7112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7113 IEM_MC_ARG(uint32_t, u32Enc, 1);
7114 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7115 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7116 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7117 IEM_MC_END();
7118 }
7119 }
7120 else
7121 {
7122 /*
7123 * Memory, register.
7124 */
7125 if (enmEffOpSize == IEMMODE_64BIT)
7126 {
7127 IEM_MC_BEGIN(3, 0);
7128 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7129 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7130 IEM_MC_ARG(uint64_t, u64Enc, 2);
7131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7132 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7133 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7134 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7135 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7136 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7137 IEM_MC_END();
7138 }
7139 else
7140 {
7141 IEM_MC_BEGIN(3, 0);
7142 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7143 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7144 IEM_MC_ARG(uint32_t, u32Enc, 2);
7145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7146 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7147 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7148 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7149 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7150 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7151 IEM_MC_END();
7152 }
7153 }
7154}
7155#else
7156FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7157#endif
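
/*
 * Note: For VMREAD the reg field (Gy) supplies the VMCS field encoding and
 * Ey receives the value. The operand size is fixed at 64 bits in 64-bit
 * mode and 32 bits otherwise, so enmEffOpSize above ignores prefixes (the
 * decoding helper rejects 66h/F3h/F2h).
 */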
7158
7159/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7160FNIEMOP_STUB(iemOp_AmdGrp17);
7161/* Opcode 0xf3 0x0f 0x78 - invalid */
7162/* Opcode 0xf2 0x0f 0x78 - invalid */
7163
7164/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7165#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7166FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7167{
7168 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7169 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7170 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7171 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7172
7173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7174 if (IEM_IS_MODRM_REG_MODE(bRm))
7175 {
7176 /*
7177 * Register, register.
7178 */
7179 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7180 if (enmEffOpSize == IEMMODE_64BIT)
7181 {
7182 IEM_MC_BEGIN(2, 0);
7183 IEM_MC_ARG(uint64_t, u64Val, 0);
7184 IEM_MC_ARG(uint64_t, u64Enc, 1);
7185 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7186 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7187 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7188 IEM_MC_END();
7189 }
7190 else
7191 {
7192 IEM_MC_BEGIN(2, 0);
7193 IEM_MC_ARG(uint32_t, u32Val, 0);
7194 IEM_MC_ARG(uint32_t, u32Enc, 1);
7195 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7196 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7197 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7198 IEM_MC_END();
7199 }
7200 }
7201 else
7202 {
7203 /*
7204 * Register, memory.
7205 */
7206 if (enmEffOpSize == IEMMODE_64BIT)
7207 {
7208 IEM_MC_BEGIN(3, 0);
7209 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7210 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7211 IEM_MC_ARG(uint64_t, u64Enc, 2);
7212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7213 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7214 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7215 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7216 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7217 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7218 IEM_MC_END();
7219 }
7220 else
7221 {
7222 IEM_MC_BEGIN(3, 0);
7223 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7224 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7225 IEM_MC_ARG(uint32_t, u32Enc, 2);
7226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7227 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7228 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7229 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7230 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7231 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7232 IEM_MC_END();
7233 }
7234 }
7235}
7236#else
7237FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7238#endif
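
/*
 * Note: VMWRITE mirrors VMREAD with the operands reversed: Gy holds the
 * VMCS field encoding and Ey supplies the value, under the same fixed
 * 64-bit/32-bit operand size rule as above.
 */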
7239/* Opcode 0x66 0x0f 0x79 - invalid */
7240/* Opcode 0xf3 0x0f 0x79 - invalid */
7241/* Opcode 0xf2 0x0f 0x79 - invalid */
7242
7243/* Opcode 0x0f 0x7a - invalid */
7244/* Opcode 0x66 0x0f 0x7a - invalid */
7245/* Opcode 0xf3 0x0f 0x7a - invalid */
7246/* Opcode 0xf2 0x0f 0x7a - invalid */
7247
7248/* Opcode 0x0f 0x7b - invalid */
7249/* Opcode 0x66 0x0f 0x7b - invalid */
7250/* Opcode 0xf3 0x0f 0x7b - invalid */
7251/* Opcode 0xf2 0x0f 0x7b - invalid */
7252
7253/* Opcode 0x0f 0x7c - invalid */
7254
7255
7256/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7257FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7258{
7259 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7260 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7261}
7262
7263
7264/* Opcode 0xf3 0x0f 0x7c - invalid */
7265
7266
7267/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7268FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7269{
7270 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7271 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7272}
7273
7274
7275/* Opcode 0x0f 0x7d - invalid */
7276
7277
7278/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7279FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7280{
7281 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7282 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7283}
7284
7285
7286/* Opcode 0xf3 0x0f 0x7d - invalid */
7287
7288
7289/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7290FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7291{
7292 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7293 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7294}
7295
7296
7297/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7298FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7299{
7300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7302 {
7303 /**
7304 * @opcode 0x7e
7305 * @opcodesub rex.w=1
7306 * @oppfx none
7307 * @opcpuid mmx
7308 * @opgroup og_mmx_datamove
7309 * @opxcpttype 5
7310 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7311 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7312 */
7313 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7314 if (IEM_IS_MODRM_REG_MODE(bRm))
7315 {
7316 /* greg64, MMX */
7317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7318 IEM_MC_BEGIN(0, 1);
7319 IEM_MC_LOCAL(uint64_t, u64Tmp);
7320
7321 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7322 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7323 IEM_MC_FPU_TO_MMX_MODE();
7324
7325 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7326 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7327
7328 IEM_MC_ADVANCE_RIP_AND_FINISH();
7329 IEM_MC_END();
7330 }
7331 else
7332 {
7333 /* [mem64], MMX */
7334 IEM_MC_BEGIN(0, 2);
7335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7336 IEM_MC_LOCAL(uint64_t, u64Tmp);
7337
7338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7340 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7341 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7342 IEM_MC_FPU_TO_MMX_MODE();
7343
7344 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7345 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7346
7347 IEM_MC_ADVANCE_RIP_AND_FINISH();
7348 IEM_MC_END();
7349 }
7350 }
7351 else
7352 {
7353 /**
7354 * @opdone
7355 * @opcode 0x7e
7356 * @opcodesub rex.w=0
7357 * @oppfx none
7358 * @opcpuid mmx
7359 * @opgroup og_mmx_datamove
7360 * @opxcpttype 5
7361 * @opfunction iemOp_movd_q_Ey_Pd
7362 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7363 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7364 */
7365 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7366 if (IEM_IS_MODRM_REG_MODE(bRm))
7367 {
7368 /* greg32, MMX */
7369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7370 IEM_MC_BEGIN(0, 1);
7371 IEM_MC_LOCAL(uint32_t, u32Tmp);
7372
7373 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7374 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7375 IEM_MC_FPU_TO_MMX_MODE();
7376
7377 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7378 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7379
7380 IEM_MC_ADVANCE_RIP_AND_FINISH();
7381 IEM_MC_END();
7382 }
7383 else
7384 {
7385 /* [mem32], MMX */
7386 IEM_MC_BEGIN(0, 2);
7387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7388 IEM_MC_LOCAL(uint32_t, u32Tmp);
7389
7390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7392 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7393 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7394 IEM_MC_FPU_TO_MMX_MODE();
7395
7396 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7397 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7398
7399 IEM_MC_ADVANCE_RIP_AND_FINISH();
7400 IEM_MC_END();
7401 }
7402 }
7403}
7404
7405
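/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */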
7406FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7407{
7408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7409 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7410 {
7411 /**
7412 * @opcode 0x7e
7413 * @opcodesub rex.w=1
7414 * @oppfx 0x66
7415 * @opcpuid sse2
7416 * @opgroup og_sse2_simdint_datamove
7417 * @opxcpttype 5
7418 * @optest 64-bit / op1=1 op2=2 -> op1=2
7419 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7420 */
7421 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7422 if (IEM_IS_MODRM_REG_MODE(bRm))
7423 {
7424 /* greg64, XMM */
7425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7426 IEM_MC_BEGIN(0, 1);
7427 IEM_MC_LOCAL(uint64_t, u64Tmp);
7428
7429 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7430 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7431
7432 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7433 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7434
7435 IEM_MC_ADVANCE_RIP_AND_FINISH();
7436 IEM_MC_END();
7437 }
7438 else
7439 {
7440 /* [mem64], XMM */
7441 IEM_MC_BEGIN(0, 2);
7442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7443 IEM_MC_LOCAL(uint64_t, u64Tmp);
7444
7445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7449
7450 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7451 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7452
7453 IEM_MC_ADVANCE_RIP_AND_FINISH();
7454 IEM_MC_END();
7455 }
7456 }
7457 else
7458 {
7459 /**
7460 * @opdone
7461 * @opcode 0x7e
7462 * @opcodesub rex.w=0
7463 * @oppfx 0x66
7464 * @opcpuid sse2
7465 * @opgroup og_sse2_simdint_datamove
7466 * @opxcpttype 5
7467 * @opfunction iemOp_movd_q_Ey_Vy
7468 * @optest op1=1 op2=2 -> op1=2
7469 * @optest op1=0 op2=-42 -> op1=-42
7470 */
7471 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7472 if (IEM_IS_MODRM_REG_MODE(bRm))
7473 {
7474 /* greg32, XMM */
7475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7476 IEM_MC_BEGIN(0, 1);
7477 IEM_MC_LOCAL(uint32_t, u32Tmp);
7478
7479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7481
7482 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7484
7485 IEM_MC_ADVANCE_RIP_AND_FINISH();
7486 IEM_MC_END();
7487 }
7488 else
7489 {
7490 /* [mem32], XMM */
7491 IEM_MC_BEGIN(0, 2);
7492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7493 IEM_MC_LOCAL(uint32_t, u32Tmp);
7494
7495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7499
7500 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7501 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7502
7503 IEM_MC_ADVANCE_RIP_AND_FINISH();
7504 IEM_MC_END();
7505 }
7506 }
7507}
7508
7509/**
7510 * @opcode 0x7e
7511 * @oppfx 0xf3
7512 * @opcpuid sse2
7513 * @opgroup og_sse2_pcksclr_datamove
7514 * @opxcpttype none
7515 * @optest op1=1 op2=2 -> op1=2
7516 * @optest op1=0 op2=-42 -> op1=-42
7517 */
7518FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7519{
7520 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7522 if (IEM_IS_MODRM_REG_MODE(bRm))
7523 {
7524 /*
7525 * XMM128, XMM64.
7526 */
7527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7528 IEM_MC_BEGIN(0, 2);
7529 IEM_MC_LOCAL(uint64_t, uSrc);
7530
7531 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7532 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7533
7534 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7535 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7536
7537 IEM_MC_ADVANCE_RIP_AND_FINISH();
7538 IEM_MC_END();
7539 }
7540 else
7541 {
7542 /*
7543 * XMM128, [mem64].
7544 */
7545 IEM_MC_BEGIN(0, 2);
7546 IEM_MC_LOCAL(uint64_t, uSrc);
7547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7548
7549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7553
7554 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7555 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7556
7557 IEM_MC_ADVANCE_RIP_AND_FINISH();
7558 IEM_MC_END();
7559 }
7560}
7561
7562/* Opcode 0xf2 0x0f 0x7e - invalid */
7563
7564
7565/** Opcode 0x0f 0x7f - movq Qq, Pq */
7566FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7567{
7568 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7570 if (IEM_IS_MODRM_REG_MODE(bRm))
7571 {
7572 /*
7573 * MMX, MMX.
7574 */
7575 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7576 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7578 IEM_MC_BEGIN(0, 1);
7579 IEM_MC_LOCAL(uint64_t, u64Tmp);
7580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7581 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7582 IEM_MC_FPU_TO_MMX_MODE();
7583
7584 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7585 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7586
7587 IEM_MC_ADVANCE_RIP_AND_FINISH();
7588 IEM_MC_END();
7589 }
7590 else
7591 {
7592 /*
7593 * [mem64], MMX.
7594 */
7595 IEM_MC_BEGIN(0, 2);
7596 IEM_MC_LOCAL(uint64_t, u64Tmp);
7597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7598
7599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7601 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7602 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7603 IEM_MC_FPU_TO_MMX_MODE();
7604
7605 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7606 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7607
7608 IEM_MC_ADVANCE_RIP_AND_FINISH();
7609 IEM_MC_END();
7610 }
7611}
7612
7613/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7614FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7615{
7616 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7618 if (IEM_IS_MODRM_REG_MODE(bRm))
7619 {
7620 /*
7621 * XMM, XMM.
7622 */
7623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7624 IEM_MC_BEGIN(0, 0);
7625 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7626 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7627 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7628 IEM_GET_MODRM_REG(pVCpu, bRm));
7629 IEM_MC_ADVANCE_RIP_AND_FINISH();
7630 IEM_MC_END();
7631 }
7632 else
7633 {
7634 /*
7635 * [mem128], XMM.
7636 */
7637 IEM_MC_BEGIN(0, 2);
7638 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7640
7641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7643 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7644 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7645
7646 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7647 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7648
7649 IEM_MC_ADVANCE_RIP_AND_FINISH();
7650 IEM_MC_END();
7651 }
7652}
7653
7654/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7655FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7656{
7657 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7658 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7659 if (IEM_IS_MODRM_REG_MODE(bRm))
7660 {
7661 /*
7662 * XMM, XMM.
7663 */
7664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7665 IEM_MC_BEGIN(0, 0);
7666 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7667 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7668 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7669 IEM_GET_MODRM_REG(pVCpu, bRm));
7670 IEM_MC_ADVANCE_RIP_AND_FINISH();
7671 IEM_MC_END();
7672 }
7673 else
7674 {
7675 /*
7676 * [mem128], XMM.
7677 */
7678 IEM_MC_BEGIN(0, 2);
7679 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7681
7682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7684 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7685 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7686
7687 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7688 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7689
7690 IEM_MC_ADVANCE_RIP_AND_FINISH();
7691 IEM_MC_END();
7692 }
7693}
7694
7695/* Opcode 0xf2 0x0f 0x7f - invalid */
7696
7697
7698
7699/** Opcode 0x0f 0x80. */
7700FNIEMOP_DEF(iemOp_jo_Jv)
7701{
7702 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7703 IEMOP_HLP_MIN_386();
7704 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7705 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7706 {
7707 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7709
7710 IEM_MC_BEGIN(0, 0);
7711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7712 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7713 } IEM_MC_ELSE() {
7714 IEM_MC_ADVANCE_RIP_AND_FINISH();
7715 } IEM_MC_ENDIF();
7716 IEM_MC_END();
7717 }
7718 else
7719 {
7720 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7722
7723 IEM_MC_BEGIN(0, 0);
7724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7725 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7726 } IEM_MC_ELSE() {
7727 IEM_MC_ADVANCE_RIP_AND_FINISH();
7728 } IEM_MC_ENDIF();
7729 IEM_MC_END();
7730 }
7731}
7732
7733
7734/** Opcode 0x0f 0x81. */
7735FNIEMOP_DEF(iemOp_jno_Jv)
7736{
7737 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7738 IEMOP_HLP_MIN_386();
7739 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7740 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7741 {
7742 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7744
7745 IEM_MC_BEGIN(0, 0);
7746 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7747 IEM_MC_ADVANCE_RIP_AND_FINISH();
7748 } IEM_MC_ELSE() {
7749 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7750 } IEM_MC_ENDIF();
7751 IEM_MC_END();
7752 }
7753 else
7754 {
7755 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757
7758 IEM_MC_BEGIN(0, 0);
7759 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7760 IEM_MC_ADVANCE_RIP_AND_FINISH();
7761 } IEM_MC_ELSE() {
7762 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7763 } IEM_MC_ENDIF();
7764 IEM_MC_END();
7765 }
7766}
7767
7768
7769/** Opcode 0x0f 0x82. */
7770FNIEMOP_DEF(iemOp_jc_Jv)
7771{
7772 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7773 IEMOP_HLP_MIN_386();
7774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7775 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7776 {
7777 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7779
7780 IEM_MC_BEGIN(0, 0);
7781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7782 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7783 } IEM_MC_ELSE() {
7784 IEM_MC_ADVANCE_RIP_AND_FINISH();
7785 } IEM_MC_ENDIF();
7786 IEM_MC_END();
7787 }
7788 else
7789 {
7790 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7792
7793 IEM_MC_BEGIN(0, 0);
7794 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7795 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7796 } IEM_MC_ELSE() {
7797 IEM_MC_ADVANCE_RIP_AND_FINISH();
7798 } IEM_MC_ENDIF();
7799 IEM_MC_END();
7800 }
7801}
7802
7803
7804/** Opcode 0x0f 0x83. */
7805FNIEMOP_DEF(iemOp_jnc_Jv)
7806{
7807 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7808 IEMOP_HLP_MIN_386();
7809 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7810 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7811 {
7812 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7814
7815 IEM_MC_BEGIN(0, 0);
7816 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7817 IEM_MC_ADVANCE_RIP_AND_FINISH();
7818 } IEM_MC_ELSE() {
7819 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7820 } IEM_MC_ENDIF();
7821 IEM_MC_END();
7822 }
7823 else
7824 {
7825 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7827
7828 IEM_MC_BEGIN(0, 0);
7829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7830 IEM_MC_ADVANCE_RIP_AND_FINISH();
7831 } IEM_MC_ELSE() {
7832 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7833 } IEM_MC_ENDIF();
7834 IEM_MC_END();
7835 }
7836}
7837
7838
7839/** Opcode 0x0f 0x84. */
7840FNIEMOP_DEF(iemOp_je_Jv)
7841{
7842 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7843 IEMOP_HLP_MIN_386();
7844 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7845 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7846 {
7847 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7849
7850 IEM_MC_BEGIN(0, 0);
7851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7852 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7853 } IEM_MC_ELSE() {
7854 IEM_MC_ADVANCE_RIP_AND_FINISH();
7855 } IEM_MC_ENDIF();
7856 IEM_MC_END();
7857 }
7858 else
7859 {
7860 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7862
7863 IEM_MC_BEGIN(0, 0);
7864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7865 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7866 } IEM_MC_ELSE() {
7867 IEM_MC_ADVANCE_RIP_AND_FINISH();
7868 } IEM_MC_ENDIF();
7869 IEM_MC_END();
7870 }
7871}
7872
7873
7874/** Opcode 0x0f 0x85. */
7875FNIEMOP_DEF(iemOp_jne_Jv)
7876{
7877 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7878 IEMOP_HLP_MIN_386();
7879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7881 {
7882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7884
7885 IEM_MC_BEGIN(0, 0);
7886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7887 IEM_MC_ADVANCE_RIP_AND_FINISH();
7888 } IEM_MC_ELSE() {
7889 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7890 } IEM_MC_ENDIF();
7891 IEM_MC_END();
7892 }
7893 else
7894 {
7895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897
7898 IEM_MC_BEGIN(0, 0);
7899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7900 IEM_MC_ADVANCE_RIP_AND_FINISH();
7901 } IEM_MC_ELSE() {
7902 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7903 } IEM_MC_ENDIF();
7904 IEM_MC_END();
7905 }
7906}
7907
7908
7909/** Opcode 0x0f 0x86. */
7910FNIEMOP_DEF(iemOp_jbe_Jv)
7911{
7912 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7913 IEMOP_HLP_MIN_386();
7914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7915 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7916 {
7917 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7919
7920 IEM_MC_BEGIN(0, 0);
7921 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7922 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7923 } IEM_MC_ELSE() {
7924 IEM_MC_ADVANCE_RIP_AND_FINISH();
7925 } IEM_MC_ENDIF();
7926 IEM_MC_END();
7927 }
7928 else
7929 {
7930 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932
7933 IEM_MC_BEGIN(0, 0);
7934 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7935 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7936 } IEM_MC_ELSE() {
7937 IEM_MC_ADVANCE_RIP_AND_FINISH();
7938 } IEM_MC_ENDIF();
7939 IEM_MC_END();
7940 }
7941}
7942
7943
7944/** Opcode 0x0f 0x87. */
7945FNIEMOP_DEF(iemOp_jnbe_Jv)
7946{
7947 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7948 IEMOP_HLP_MIN_386();
7949 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7950 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7951 {
7952 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954
7955 IEM_MC_BEGIN(0, 0);
7956 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7957 IEM_MC_ADVANCE_RIP_AND_FINISH();
7958 } IEM_MC_ELSE() {
7959 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7960 } IEM_MC_ENDIF();
7961 IEM_MC_END();
7962 }
7963 else
7964 {
7965 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7967
7968 IEM_MC_BEGIN(0, 0);
7969 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7970 IEM_MC_ADVANCE_RIP_AND_FINISH();
7971 } IEM_MC_ELSE() {
7972 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7973 } IEM_MC_ENDIF();
7974 IEM_MC_END();
7975 }
7976}
7977
7978
7979/** Opcode 0x0f 0x88. */
7980FNIEMOP_DEF(iemOp_js_Jv)
7981{
7982 IEMOP_MNEMONIC(js_Jv, "js Jv");
7983 IEMOP_HLP_MIN_386();
7984 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7985 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7986 {
7987 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7989
7990 IEM_MC_BEGIN(0, 0);
7991 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7992 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7993 } IEM_MC_ELSE() {
7994 IEM_MC_ADVANCE_RIP_AND_FINISH();
7995 } IEM_MC_ENDIF();
7996 IEM_MC_END();
7997 }
7998 else
7999 {
8000 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8002
8003 IEM_MC_BEGIN(0, 0);
8004 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8005 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8006 } IEM_MC_ELSE() {
8007 IEM_MC_ADVANCE_RIP_AND_FINISH();
8008 } IEM_MC_ENDIF();
8009 IEM_MC_END();
8010 }
8011}
8012
8013
8014/** Opcode 0x0f 0x89. */
8015FNIEMOP_DEF(iemOp_jns_Jv)
8016{
8017 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8018 IEMOP_HLP_MIN_386();
8019 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8020 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8021 {
8022 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024
8025 IEM_MC_BEGIN(0, 0);
8026 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8027 IEM_MC_ADVANCE_RIP_AND_FINISH();
8028 } IEM_MC_ELSE() {
8029 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8030 } IEM_MC_ENDIF();
8031 IEM_MC_END();
8032 }
8033 else
8034 {
8035 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8037
8038 IEM_MC_BEGIN(0, 0);
8039 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8040 IEM_MC_ADVANCE_RIP_AND_FINISH();
8041 } IEM_MC_ELSE() {
8042 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8043 } IEM_MC_ENDIF();
8044 IEM_MC_END();
8045 }
8046}
8047
8048
8049/** Opcode 0x0f 0x8a. */
8050FNIEMOP_DEF(iemOp_jp_Jv)
8051{
8052 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8053 IEMOP_HLP_MIN_386();
8054 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8055 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8056 {
8057 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059
8060 IEM_MC_BEGIN(0, 0);
8061 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8062 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8063 } IEM_MC_ELSE() {
8064 IEM_MC_ADVANCE_RIP_AND_FINISH();
8065 } IEM_MC_ENDIF();
8066 IEM_MC_END();
8067 }
8068 else
8069 {
8070 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8072
8073 IEM_MC_BEGIN(0, 0);
8074 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8075 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8076 } IEM_MC_ELSE() {
8077 IEM_MC_ADVANCE_RIP_AND_FINISH();
8078 } IEM_MC_ENDIF();
8079 IEM_MC_END();
8080 }
8081}
8082
8083
8084/** Opcode 0x0f 0x8b. */
8085FNIEMOP_DEF(iemOp_jnp_Jv)
8086{
8087 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8088 IEMOP_HLP_MIN_386();
8089 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8090 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8091 {
8092 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8094
8095 IEM_MC_BEGIN(0, 0);
8096 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8097 IEM_MC_ADVANCE_RIP_AND_FINISH();
8098 } IEM_MC_ELSE() {
8099 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8100 } IEM_MC_ENDIF();
8101 IEM_MC_END();
8102 }
8103 else
8104 {
8105 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8107
8108 IEM_MC_BEGIN(0, 0);
8109 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8110 IEM_MC_ADVANCE_RIP_AND_FINISH();
8111 } IEM_MC_ELSE() {
8112 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8113 } IEM_MC_ENDIF();
8114 IEM_MC_END();
8115 }
8116}
8117
8118
8119/** Opcode 0x0f 0x8c. */
8120FNIEMOP_DEF(iemOp_jl_Jv)
8121{
8122 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8123 IEMOP_HLP_MIN_386();
8124 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8125 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8126 {
8127 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8129
8130 IEM_MC_BEGIN(0, 0);
8131 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8132 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8133 } IEM_MC_ELSE() {
8134 IEM_MC_ADVANCE_RIP_AND_FINISH();
8135 } IEM_MC_ENDIF();
8136 IEM_MC_END();
8137 }
8138 else
8139 {
8140 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8142
8143 IEM_MC_BEGIN(0, 0);
8144 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8145 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8146 } IEM_MC_ELSE() {
8147 IEM_MC_ADVANCE_RIP_AND_FINISH();
8148 } IEM_MC_ENDIF();
8149 IEM_MC_END();
8150 }
8151}
8152
8153
8154/** Opcode 0x0f 0x8d. */
8155FNIEMOP_DEF(iemOp_jnl_Jv)
8156{
8157 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8158 IEMOP_HLP_MIN_386();
8159 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8160 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8161 {
8162 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8164
8165 IEM_MC_BEGIN(0, 0);
8166 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8167 IEM_MC_ADVANCE_RIP_AND_FINISH();
8168 } IEM_MC_ELSE() {
8169 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8170 } IEM_MC_ENDIF();
8171 IEM_MC_END();
8172 }
8173 else
8174 {
8175 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8177
8178 IEM_MC_BEGIN(0, 0);
8179 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8180 IEM_MC_ADVANCE_RIP_AND_FINISH();
8181 } IEM_MC_ELSE() {
8182 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8183 } IEM_MC_ENDIF();
8184 IEM_MC_END();
8185 }
8186}
8187
8188
8189/** Opcode 0x0f 0x8e. */
8190FNIEMOP_DEF(iemOp_jle_Jv)
8191{
8192 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8193 IEMOP_HLP_MIN_386();
8194 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8195 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8196 {
8197 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8199
8200 IEM_MC_BEGIN(0, 0);
8201 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8202 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8203 } IEM_MC_ELSE() {
8204 IEM_MC_ADVANCE_RIP_AND_FINISH();
8205 } IEM_MC_ENDIF();
8206 IEM_MC_END();
8207 }
8208 else
8209 {
8210 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8212
8213 IEM_MC_BEGIN(0, 0);
8214 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8215 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8216 } IEM_MC_ELSE() {
8217 IEM_MC_ADVANCE_RIP_AND_FINISH();
8218 } IEM_MC_ENDIF();
8219 IEM_MC_END();
8220 }
8221}
8222
8223
8224/** Opcode 0x0f 0x8f. */
8225FNIEMOP_DEF(iemOp_jnle_Jv)
8226{
8227 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8228 IEMOP_HLP_MIN_386();
8229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8230 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8231 {
8232 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8234
8235 IEM_MC_BEGIN(0, 0);
8236 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8237 IEM_MC_ADVANCE_RIP_AND_FINISH();
8238 } IEM_MC_ELSE() {
8239 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8240 } IEM_MC_ENDIF();
8241 IEM_MC_END();
8242 }
8243 else
8244 {
8245 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8247
8248 IEM_MC_BEGIN(0, 0);
8249 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8250 IEM_MC_ADVANCE_RIP_AND_FINISH();
8251 } IEM_MC_ELSE() {
8252 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8253 } IEM_MC_ENDIF();
8254 IEM_MC_END();
8255 }
8256}
8257
8258
8259/** Opcode 0x0f 0x90. */
8260FNIEMOP_DEF(iemOp_seto_Eb)
8261{
8262 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8263 IEMOP_HLP_MIN_386();
8264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8265
8266 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8267 * any way. AMD says it's "unused", whatever that means. We're
8268 * ignoring for now. */
8269 if (IEM_IS_MODRM_REG_MODE(bRm))
8270 {
8271 /* register target */
8272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8273 IEM_MC_BEGIN(0, 0);
8274 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8275 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8276 } IEM_MC_ELSE() {
8277 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8278 } IEM_MC_ENDIF();
8279 IEM_MC_ADVANCE_RIP_AND_FINISH();
8280 IEM_MC_END();
8281 }
8282 else
8283 {
8284 /* memory target */
8285 IEM_MC_BEGIN(0, 1);
8286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8290 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8291 } IEM_MC_ELSE() {
8292 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8293 } IEM_MC_ENDIF();
8294 IEM_MC_ADVANCE_RIP_AND_FINISH();
8295 IEM_MC_END();
8296 }
8297}
8298
8299
8300/** Opcode 0x0f 0x91. */
8301FNIEMOP_DEF(iemOp_setno_Eb)
8302{
8303 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8304 IEMOP_HLP_MIN_386();
8305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8306
8307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8308 * any way. AMD says it's "unused", whatever that means. We're
8309 * ignoring for now. */
8310 if (IEM_IS_MODRM_REG_MODE(bRm))
8311 {
8312 /* register target */
8313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8314 IEM_MC_BEGIN(0, 0);
8315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8316 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8317 } IEM_MC_ELSE() {
8318 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8319 } IEM_MC_ENDIF();
8320 IEM_MC_ADVANCE_RIP_AND_FINISH();
8321 IEM_MC_END();
8322 }
8323 else
8324 {
8325 /* memory target */
8326 IEM_MC_BEGIN(0, 1);
8327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8332 } IEM_MC_ELSE() {
8333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8334 } IEM_MC_ENDIF();
8335 IEM_MC_ADVANCE_RIP_AND_FINISH();
8336 IEM_MC_END();
8337 }
8338}
8339
8340
8341/** Opcode 0x0f 0x92. */
8342FNIEMOP_DEF(iemOp_setc_Eb)
8343{
8344 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8345 IEMOP_HLP_MIN_386();
8346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8347
8348 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8349 * any way. AMD says it's "unused", whatever that means. We're
8350 * ignoring for now. */
8351 if (IEM_IS_MODRM_REG_MODE(bRm))
8352 {
8353 /* register target */
8354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8355 IEM_MC_BEGIN(0, 0);
8356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8357 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8358 } IEM_MC_ELSE() {
8359 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8360 } IEM_MC_ENDIF();
8361 IEM_MC_ADVANCE_RIP_AND_FINISH();
8362 IEM_MC_END();
8363 }
8364 else
8365 {
8366 /* memory target */
8367 IEM_MC_BEGIN(0, 1);
8368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8371 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8372 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8373 } IEM_MC_ELSE() {
8374 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8375 } IEM_MC_ENDIF();
8376 IEM_MC_ADVANCE_RIP_AND_FINISH();
8377 IEM_MC_END();
8378 }
8379}
8380
8381
8382/** Opcode 0x0f 0x93. */
8383FNIEMOP_DEF(iemOp_setnc_Eb)
8384{
8385 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8386 IEMOP_HLP_MIN_386();
8387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8388
8389 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8390 * any way. AMD says it's "unused", whatever that means. We're
8391 * ignoring for now. */
8392 if (IEM_IS_MODRM_REG_MODE(bRm))
8393 {
8394 /* register target */
8395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8396 IEM_MC_BEGIN(0, 0);
8397 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8398 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8399 } IEM_MC_ELSE() {
8400 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8401 } IEM_MC_ENDIF();
8402 IEM_MC_ADVANCE_RIP_AND_FINISH();
8403 IEM_MC_END();
8404 }
8405 else
8406 {
8407 /* memory target */
8408 IEM_MC_BEGIN(0, 1);
8409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8413 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8414 } IEM_MC_ELSE() {
8415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8416 } IEM_MC_ENDIF();
8417 IEM_MC_ADVANCE_RIP_AND_FINISH();
8418 IEM_MC_END();
8419 }
8420}
8421
8422
8423/** Opcode 0x0f 0x94. */
8424FNIEMOP_DEF(iemOp_sete_Eb)
8425{
8426 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8427 IEMOP_HLP_MIN_386();
8428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8429
8430 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8431 * any way. AMD says it's "unused", whatever that means. We're
8432 * ignoring for now. */
8433 if (IEM_IS_MODRM_REG_MODE(bRm))
8434 {
8435 /* register target */
8436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8437 IEM_MC_BEGIN(0, 0);
8438 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8439 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8440 } IEM_MC_ELSE() {
8441 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8442 } IEM_MC_ENDIF();
8443 IEM_MC_ADVANCE_RIP_AND_FINISH();
8444 IEM_MC_END();
8445 }
8446 else
8447 {
8448 /* memory target */
8449 IEM_MC_BEGIN(0, 1);
8450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8453 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8454 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8455 } IEM_MC_ELSE() {
8456 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8457 } IEM_MC_ENDIF();
8458 IEM_MC_ADVANCE_RIP_AND_FINISH();
8459 IEM_MC_END();
8460 }
8461}
8462
8463
8464/** Opcode 0x0f 0x95. */
8465FNIEMOP_DEF(iemOp_setne_Eb)
8466{
8467 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8468 IEMOP_HLP_MIN_386();
8469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8470
8471 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8472 * any way. AMD says it's "unused", whatever that means. We're
8473 * ignoring for now. */
8474 if (IEM_IS_MODRM_REG_MODE(bRm))
8475 {
8476 /* register target */
8477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8478 IEM_MC_BEGIN(0, 0);
8479 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8480 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8481 } IEM_MC_ELSE() {
8482 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8483 } IEM_MC_ENDIF();
8484 IEM_MC_ADVANCE_RIP_AND_FINISH();
8485 IEM_MC_END();
8486 }
8487 else
8488 {
8489 /* memory target */
8490 IEM_MC_BEGIN(0, 1);
8491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8494 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8496 } IEM_MC_ELSE() {
8497 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8498 } IEM_MC_ENDIF();
8499 IEM_MC_ADVANCE_RIP_AND_FINISH();
8500 IEM_MC_END();
8501 }
8502}
8503
8504
8505/** Opcode 0x0f 0x96. */
8506FNIEMOP_DEF(iemOp_setbe_Eb)
8507{
8508 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8509 IEMOP_HLP_MIN_386();
8510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8511
8512 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8513 * any way. AMD says it's "unused", whatever that means. We're
8514 * ignoring for now. */
8515 if (IEM_IS_MODRM_REG_MODE(bRm))
8516 {
8517 /* register target */
8518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8519 IEM_MC_BEGIN(0, 0);
8520 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8521 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8522 } IEM_MC_ELSE() {
8523 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8524 } IEM_MC_ENDIF();
8525 IEM_MC_ADVANCE_RIP_AND_FINISH();
8526 IEM_MC_END();
8527 }
8528 else
8529 {
8530 /* memory target */
8531 IEM_MC_BEGIN(0, 1);
8532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8535 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8536 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8537 } IEM_MC_ELSE() {
8538 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8539 } IEM_MC_ENDIF();
8540 IEM_MC_ADVANCE_RIP_AND_FINISH();
8541 IEM_MC_END();
8542 }
8543}
8544
8545
8546/** Opcode 0x0f 0x97. */
8547FNIEMOP_DEF(iemOp_setnbe_Eb)
8548{
8549 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8550 IEMOP_HLP_MIN_386();
8551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8552
8553 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8554 * any way. AMD says it's "unused", whatever that means. We're
8555 * ignoring for now. */
8556 if (IEM_IS_MODRM_REG_MODE(bRm))
8557 {
8558 /* register target */
8559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8560 IEM_MC_BEGIN(0, 0);
8561 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8562 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8563 } IEM_MC_ELSE() {
8564 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8565 } IEM_MC_ENDIF();
8566 IEM_MC_ADVANCE_RIP_AND_FINISH();
8567 IEM_MC_END();
8568 }
8569 else
8570 {
8571 /* memory target */
8572 IEM_MC_BEGIN(0, 1);
8573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8576 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8577 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8578 } IEM_MC_ELSE() {
8579 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8580 } IEM_MC_ENDIF();
8581 IEM_MC_ADVANCE_RIP_AND_FINISH();
8582 IEM_MC_END();
8583 }
8584}
8585
8586
8587/** Opcode 0x0f 0x98. */
8588FNIEMOP_DEF(iemOp_sets_Eb)
8589{
8590 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8591 IEMOP_HLP_MIN_386();
8592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8593
8594 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8595 * any way. AMD says it's "unused", whatever that means. We're
8596 * ignoring for now. */
8597 if (IEM_IS_MODRM_REG_MODE(bRm))
8598 {
8599 /* register target */
8600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8601 IEM_MC_BEGIN(0, 0);
8602 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8603 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8604 } IEM_MC_ELSE() {
8605 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8606 } IEM_MC_ENDIF();
8607 IEM_MC_ADVANCE_RIP_AND_FINISH();
8608 IEM_MC_END();
8609 }
8610 else
8611 {
8612 /* memory target */
8613 IEM_MC_BEGIN(0, 1);
8614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8618 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8619 } IEM_MC_ELSE() {
8620 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8621 } IEM_MC_ENDIF();
8622 IEM_MC_ADVANCE_RIP_AND_FINISH();
8623 IEM_MC_END();
8624 }
8625}
8626
8627
8628/** Opcode 0x0f 0x99. */
8629FNIEMOP_DEF(iemOp_setns_Eb)
8630{
8631 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8632 IEMOP_HLP_MIN_386();
8633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8634
8635 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8636 * any way. AMD says it's "unused", whatever that means. We're
8637 * ignoring for now. */
8638 if (IEM_IS_MODRM_REG_MODE(bRm))
8639 {
8640 /* register target */
8641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8642 IEM_MC_BEGIN(0, 0);
8643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8644 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8645 } IEM_MC_ELSE() {
8646 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8647 } IEM_MC_ENDIF();
8648 IEM_MC_ADVANCE_RIP_AND_FINISH();
8649 IEM_MC_END();
8650 }
8651 else
8652 {
8653 /* memory target */
8654 IEM_MC_BEGIN(0, 1);
8655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8660 } IEM_MC_ELSE() {
8661 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8662 } IEM_MC_ENDIF();
8663 IEM_MC_ADVANCE_RIP_AND_FINISH();
8664 IEM_MC_END();
8665 }
8666}
8667
8668
8669/** Opcode 0x0f 0x9a. */
8670FNIEMOP_DEF(iemOp_setp_Eb)
8671{
8672 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8673 IEMOP_HLP_MIN_386();
8674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8675
8676 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8677 * any way. AMD says it's "unused", whatever that means. We're
8678 * ignoring for now. */
8679 if (IEM_IS_MODRM_REG_MODE(bRm))
8680 {
8681 /* register target */
8682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8683 IEM_MC_BEGIN(0, 0);
8684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8685 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8686 } IEM_MC_ELSE() {
8687 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8688 } IEM_MC_ENDIF();
8689 IEM_MC_ADVANCE_RIP_AND_FINISH();
8690 IEM_MC_END();
8691 }
8692 else
8693 {
8694 /* memory target */
8695 IEM_MC_BEGIN(0, 1);
8696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8701 } IEM_MC_ELSE() {
8702 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8703 } IEM_MC_ENDIF();
8704 IEM_MC_ADVANCE_RIP_AND_FINISH();
8705 IEM_MC_END();
8706 }
8707}
8708
8709
8710/** Opcode 0x0f 0x9b. */
8711FNIEMOP_DEF(iemOp_setnp_Eb)
8712{
8713 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8714 IEMOP_HLP_MIN_386();
8715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8716
8717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8718 * any way. AMD says it's "unused", whatever that means. We're
8719 * ignoring for now. */
8720 if (IEM_IS_MODRM_REG_MODE(bRm))
8721 {
8722 /* register target */
8723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8724 IEM_MC_BEGIN(0, 0);
8725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8726 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8727 } IEM_MC_ELSE() {
8728 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8729 } IEM_MC_ENDIF();
8730 IEM_MC_ADVANCE_RIP_AND_FINISH();
8731 IEM_MC_END();
8732 }
8733 else
8734 {
8735 /* memory target */
8736 IEM_MC_BEGIN(0, 1);
8737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8742 } IEM_MC_ELSE() {
8743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8744 } IEM_MC_ENDIF();
8745 IEM_MC_ADVANCE_RIP_AND_FINISH();
8746 IEM_MC_END();
8747 }
8748}
8749
8750
8751/** Opcode 0x0f 0x9c. */
8752FNIEMOP_DEF(iemOp_setl_Eb)
8753{
8754 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8755 IEMOP_HLP_MIN_386();
8756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8757
8758 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8759 * any way. AMD says it's "unused", whatever that means. We're
8760 * ignoring for now. */
8761 if (IEM_IS_MODRM_REG_MODE(bRm))
8762 {
8763 /* register target */
8764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8765 IEM_MC_BEGIN(0, 0);
8766 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8768 } IEM_MC_ELSE() {
8769 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8770 } IEM_MC_ENDIF();
8771 IEM_MC_ADVANCE_RIP_AND_FINISH();
8772 IEM_MC_END();
8773 }
8774 else
8775 {
8776 /* memory target */
8777 IEM_MC_BEGIN(0, 1);
8778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8781 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8783 } IEM_MC_ELSE() {
8784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8785 } IEM_MC_ENDIF();
8786 IEM_MC_ADVANCE_RIP_AND_FINISH();
8787 IEM_MC_END();
8788 }
8789}
8790
8791
8792/** Opcode 0x0f 0x9d. */
8793FNIEMOP_DEF(iemOp_setnl_Eb)
8794{
8795 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8796 IEMOP_HLP_MIN_386();
8797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8798
8799 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8800 * any way. AMD says it's "unused", whatever that means. We're
8801 * ignoring for now. */
8802 if (IEM_IS_MODRM_REG_MODE(bRm))
8803 {
8804 /* register target */
8805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8806 IEM_MC_BEGIN(0, 0);
8807 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8808 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8809 } IEM_MC_ELSE() {
8810 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8811 } IEM_MC_ENDIF();
8812 IEM_MC_ADVANCE_RIP_AND_FINISH();
8813 IEM_MC_END();
8814 }
8815 else
8816 {
8817 /* memory target */
8818 IEM_MC_BEGIN(0, 1);
8819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8822 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8824 } IEM_MC_ELSE() {
8825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8826 } IEM_MC_ENDIF();
8827 IEM_MC_ADVANCE_RIP_AND_FINISH();
8828 IEM_MC_END();
8829 }
8830}
8831
8832
8833/** Opcode 0x0f 0x9e. */
8834FNIEMOP_DEF(iemOp_setle_Eb)
8835{
8836 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8837 IEMOP_HLP_MIN_386();
8838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8839
8840 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8841 * any way. AMD says it's "unused", whatever that means. We're
8842 * ignoring for now. */
8843 if (IEM_IS_MODRM_REG_MODE(bRm))
8844 {
8845 /* register target */
8846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8847 IEM_MC_BEGIN(0, 0);
8848 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8849 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8850 } IEM_MC_ELSE() {
8851 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8852 } IEM_MC_ENDIF();
8853 IEM_MC_ADVANCE_RIP_AND_FINISH();
8854 IEM_MC_END();
8855 }
8856 else
8857 {
8858 /* memory target */
8859 IEM_MC_BEGIN(0, 1);
8860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8863 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8864 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8865 } IEM_MC_ELSE() {
8866 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8867 } IEM_MC_ENDIF();
8868 IEM_MC_ADVANCE_RIP_AND_FINISH();
8869 IEM_MC_END();
8870 }
8871}
8872
8873
8874/** Opcode 0x0f 0x9f. */
8875FNIEMOP_DEF(iemOp_setnle_Eb)
8876{
8877 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8878 IEMOP_HLP_MIN_386();
8879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8880
8881 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8882 * any way. AMD says it's "unused", whatever that means. We're
8883 * ignoring for now. */
8884 if (IEM_IS_MODRM_REG_MODE(bRm))
8885 {
8886 /* register target */
8887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8888 IEM_MC_BEGIN(0, 0);
8889 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8890 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8891 } IEM_MC_ELSE() {
8892 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8893 } IEM_MC_ENDIF();
8894 IEM_MC_ADVANCE_RIP_AND_FINISH();
8895 IEM_MC_END();
8896 }
8897 else
8898 {
8899 /* memory target */
8900 IEM_MC_BEGIN(0, 1);
8901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8906 } IEM_MC_ELSE() {
8907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8908 } IEM_MC_ENDIF();
8909 IEM_MC_ADVANCE_RIP_AND_FINISH();
8910 IEM_MC_END();
8911 }
8912}
8913
8914
8915/**
8916 * Common 'push segment-register' helper.
8917 */
8918FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8919{
8920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8921 Assert(iReg >= X86_SREG_FS || !IEM_IS_64BIT_CODE(pVCpu)); /* Only FS and GS can be pushed in 64-bit mode. */
8922 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8923
8924 switch (pVCpu->iem.s.enmEffOpSize)
8925 {
8926 case IEMMODE_16BIT:
8927 IEM_MC_BEGIN(0, 1);
8928 IEM_MC_LOCAL(uint16_t, u16Value);
8929 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8930 IEM_MC_PUSH_U16(u16Value);
8931 IEM_MC_ADVANCE_RIP_AND_FINISH();
8932 IEM_MC_END();
8933 break;
8934
8935 case IEMMODE_32BIT:
8936 IEM_MC_BEGIN(0, 1);
8937 IEM_MC_LOCAL(uint32_t, u32Value);
8938 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
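 /* Uses IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32: real CPUs may
 write only the low 16 bits of the stack slot when pushing a segment
 register with a 32-bit operand size (see the Intel SDM notes on PUSH),
 so the zero-extended sreg push has a dedicated MC statement. */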
8939 IEM_MC_PUSH_U32_SREG(u32Value);
8940 IEM_MC_ADVANCE_RIP_AND_FINISH();
8941 IEM_MC_END();
8942 break;
8943
8944 case IEMMODE_64BIT:
8945 IEM_MC_BEGIN(0, 1);
8946 IEM_MC_LOCAL(uint64_t, u64Value);
8947 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8948 IEM_MC_PUSH_U64(u64Value);
8949 IEM_MC_ADVANCE_RIP_AND_FINISH();
8950 IEM_MC_END();
8951 break;
8952
8953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8954 }
8955}
8956
8957
8958/** Opcode 0x0f 0xa0. */
8959FNIEMOP_DEF(iemOp_push_fs)
8960{
8961 IEMOP_MNEMONIC(push_fs, "push fs");
8962 IEMOP_HLP_MIN_386();
8963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8964 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8965}
8966
8967
8968/** Opcode 0x0f 0xa1. */
8969FNIEMOP_DEF(iemOp_pop_fs)
8970{
8971 IEMOP_MNEMONIC(pop_fs, "pop fs");
8972 IEMOP_HLP_MIN_386();
8973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8974 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8975}
8976
8977
8978/** Opcode 0x0f 0xa2. */
8979FNIEMOP_DEF(iemOp_cpuid)
8980{
8981 IEMOP_MNEMONIC(cpuid, "cpuid");
8982 IEMOP_HLP_MIN_486(); /* Not on all 486s. */
8983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8984 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8985}
8986
8987
8988/**
8989 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8990 * iemOp_bts_Ev_Gv.
8991 */
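/*
 * For memory destinations the full Gv value acts as a signed bit offset
 * relative to GCPtrEffDst. Worked example for the 32-bit case: a bit offset
 * of -1 gives i32AddrAdj = (-1 >> 5) << 2 = -4 and u32Src &= 0x1f = 31, so
 * the instruction operates on bit 31 of the dword at GCPtrEffDst - 4.
 */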
8992#define IEMOP_BODY_BIT_Ev_Gv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
8993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8994 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8995 \
8996 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8997 { \
8998 /* register destination. */ \
8999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9000 switch (pVCpu->iem.s.enmEffOpSize) \
9001 { \
9002 case IEMMODE_16BIT: \
9003 IEM_MC_BEGIN(3, 0); \
9004 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9005 IEM_MC_ARG(uint16_t, u16Src, 1); \
9006 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9007 \
9008 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9009 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9010 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9011 IEM_MC_REF_EFLAGS(pEFlags); \
9012 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9013 \
9014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9015 IEM_MC_END(); \
9016 break; \
9017 \
9018 case IEMMODE_32BIT: \
9019 IEM_MC_BEGIN(3, 0); \
9020 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9021 IEM_MC_ARG(uint32_t, u32Src, 1); \
9022 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9023 \
9024 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9025 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9026 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9027 IEM_MC_REF_EFLAGS(pEFlags); \
9028 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9029 \
9030 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
9031 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9032 IEM_MC_END(); \
9033 break; \
9034 \
9035 case IEMMODE_64BIT: \
9036 IEM_MC_BEGIN(3, 0); \
9037 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9038 IEM_MC_ARG(uint64_t, u64Src, 1); \
9039 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9040 \
9041 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9042 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9043 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9044 IEM_MC_REF_EFLAGS(pEFlags); \
9045 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9046 \
9047 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9048 IEM_MC_END(); \
9049 break; \
9050 \
9051 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9052 } \
9053 } \
9054 else \
9055 { \
9056 /* memory destination. */ \
9057 /** @todo test negative bit offsets! */ \
9058 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9059 { \
9060 switch (pVCpu->iem.s.enmEffOpSize) \
9061 { \
9062 case IEMMODE_16BIT: \
9063 IEM_MC_BEGIN(3, 2); \
9064 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9065 IEM_MC_ARG(uint16_t, u16Src, 1); \
9066 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9068 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9069 \
9070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9071 IEMOP_HLP_DONE_DECODING(); \
9072 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9073 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9074 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9075 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9076 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9077 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9078 IEM_MC_FETCH_EFLAGS(EFlags); \
9079 \
9080 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9081 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9082 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
9083 \
9084 IEM_MC_COMMIT_EFLAGS(EFlags); \
9085 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9086 IEM_MC_END(); \
9087 break; \
9088 \
9089 case IEMMODE_32BIT: \
9090 IEM_MC_BEGIN(3, 2); \
9091 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9092 IEM_MC_ARG(uint32_t, u32Src, 1); \
9093 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9095 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9096 \
9097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9098 IEMOP_HLP_DONE_DECODING(); \
9099 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9100 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9101 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9102 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9103 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9104 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9105 IEM_MC_FETCH_EFLAGS(EFlags); \
9106 \
9107 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9109 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
9110 \
9111 IEM_MC_COMMIT_EFLAGS(EFlags); \
9112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9113 IEM_MC_END(); \
9114 break; \
9115 \
9116 case IEMMODE_64BIT: \
9117 IEM_MC_BEGIN(3, 2); \
9118 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9119 IEM_MC_ARG(uint64_t, u64Src, 1); \
9120 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9122 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9123 \
9124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9125 IEMOP_HLP_DONE_DECODING(); \
9126 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9127 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9128 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9129 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9130 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9131 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9132 IEM_MC_FETCH_EFLAGS(EFlags); \
9133 \
9134 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9135 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9136 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
9137 \
9138 IEM_MC_COMMIT_EFLAGS(EFlags); \
9139 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9140 IEM_MC_END(); \
9141 break; \
9142 \
9143 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9144 } \
9145 } \
9146 else \
9147 { \
9148 (void)0
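
/*
 * IEMOP_BODY_BIT_Ev_Gv ends inside an open 'else' branch that is taken when a
 * LOCK prefix is present, so it must be followed by exactly one of the two
 * tail macros below: BT pairs with the NO_LOCK variant, BTS/BTR/BTC with the
 * LOCKED one, e.g.:
 *
 *      IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
 *      IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
 */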
9149
9150#define IEMOP_BODY_BIT_Ev_Gv_NO_LOCK() \
9151 IEMOP_HLP_DONE_DECODING(); \
9152 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9153 } \
9154 } \
9155 (void)0
9156
9157#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9158 switch (pVCpu->iem.s.enmEffOpSize) \
9159 { \
9160 case IEMMODE_16BIT: \
9161 IEM_MC_BEGIN(3, 2); \
9162 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9163 IEM_MC_ARG(uint16_t, u16Src, 1); \
9164 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9166 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9167 \
9168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9169 IEMOP_HLP_DONE_DECODING(); \
9170 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9171 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9172 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9173 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9174 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9175 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9176 IEM_MC_FETCH_EFLAGS(EFlags); \
9177 \
9178 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9179 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9180 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
9181 \
9182 IEM_MC_COMMIT_EFLAGS(EFlags); \
9183 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9184 IEM_MC_END(); \
9185 break; \
9186 \
9187 case IEMMODE_32BIT: \
9188 IEM_MC_BEGIN(3, 2); \
9189 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9190 IEM_MC_ARG(uint32_t, u32Src, 1); \
9191 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9193 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9194 \
9195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9196 IEMOP_HLP_DONE_DECODING(); \
9197 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9198 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9199 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9200 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9201 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9202 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9203 IEM_MC_FETCH_EFLAGS(EFlags); \
9204 \
9205 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9206 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9207 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
9208 \
9209 IEM_MC_COMMIT_EFLAGS(EFlags); \
9210 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9211 IEM_MC_END(); \
9212 break; \
9213 \
9214 case IEMMODE_64BIT: \
9215 IEM_MC_BEGIN(3, 2); \
9216 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9217 IEM_MC_ARG(uint64_t, u64Src, 1); \
9218 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9220 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9221 \
9222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9223 IEMOP_HLP_DONE_DECODING(); \
9224 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9225 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9226 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9227 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9228 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9229 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9230 IEM_MC_FETCH_EFLAGS(EFlags); \
9231 \
9232 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9233 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9234 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
9235 \
9236 IEM_MC_COMMIT_EFLAGS(EFlags); \
9237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9238 IEM_MC_END(); \
9239 break; \
9240 \
9241 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9242 } \
9243 } \
9244 } \
9245 (void)0
9246
9247
9248/** Opcode 0x0f 0xa3. */
9249FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9250{
9251 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9252 IEMOP_HLP_MIN_386();
9253 IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
9254 IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
9255}
9256
9257
9258/**
9259 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9260 */
9261FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9262{
9263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9264 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9265
9266 if (IEM_IS_MODRM_REG_MODE(bRm))
9267 {
9268 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9270
9271 switch (pVCpu->iem.s.enmEffOpSize)
9272 {
9273 case IEMMODE_16BIT:
9274 IEM_MC_BEGIN(4, 0);
9275 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9276 IEM_MC_ARG(uint16_t, u16Src, 1);
9277 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9278 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9279
9280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9282 IEM_MC_REF_EFLAGS(pEFlags);
9283 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9284
9285 IEM_MC_ADVANCE_RIP_AND_FINISH();
9286 IEM_MC_END();
9287 break;
9288
9289 case IEMMODE_32BIT:
9290 IEM_MC_BEGIN(4, 0);
9291 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9292 IEM_MC_ARG(uint32_t, u32Src, 1);
9293 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9294 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9295
9296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9298 IEM_MC_REF_EFLAGS(pEFlags);
9299 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9300
9301 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9302 IEM_MC_ADVANCE_RIP_AND_FINISH();
9303 IEM_MC_END();
9304 break;
9305
9306 case IEMMODE_64BIT:
9307 IEM_MC_BEGIN(4, 0);
9308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9309 IEM_MC_ARG(uint64_t, u64Src, 1);
9310 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9311 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9312
9313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9315 IEM_MC_REF_EFLAGS(pEFlags);
9316 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9317
9318 IEM_MC_ADVANCE_RIP_AND_FINISH();
9319 IEM_MC_END();
9320 break;
9321
9322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9323 }
9324 }
9325 else
9326 {
9327 switch (pVCpu->iem.s.enmEffOpSize)
9328 {
9329 case IEMMODE_16BIT:
9330 IEM_MC_BEGIN(4, 2);
9331 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9332 IEM_MC_ARG(uint16_t, u16Src, 1);
9333 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9334 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9336
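 /* The shift count immediate (Ib) hasn't been fetched yet, so the
 effective address calculation is told about the one trailing
 immediate byte (needed for RIP-relative addressing); ditto for
 the 32-bit and 64-bit cases below. */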
9337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9338 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9339 IEM_MC_ASSIGN(cShiftArg, cShift);
9340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9341 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9342 IEM_MC_FETCH_EFLAGS(EFlags);
9343 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9344 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9345
9346 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9347 IEM_MC_COMMIT_EFLAGS(EFlags);
9348 IEM_MC_ADVANCE_RIP_AND_FINISH();
9349 IEM_MC_END();
9350 break;
9351
9352 case IEMMODE_32BIT:
9353 IEM_MC_BEGIN(4, 2);
9354 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9355 IEM_MC_ARG(uint32_t, u32Src, 1);
9356 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9359
9360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9361 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9362 IEM_MC_ASSIGN(cShiftArg, cShift);
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9364 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9365 IEM_MC_FETCH_EFLAGS(EFlags);
9366 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9367 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9368
9369 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9370 IEM_MC_COMMIT_EFLAGS(EFlags);
9371 IEM_MC_ADVANCE_RIP_AND_FINISH();
9372 IEM_MC_END();
9373 break;
9374
9375 case IEMMODE_64BIT:
9376 IEM_MC_BEGIN(4, 2);
9377 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9378 IEM_MC_ARG(uint64_t, u64Src, 1);
9379 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9380 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9382
9383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9384 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9385 IEM_MC_ASSIGN(cShiftArg, cShift);
9386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9387 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9388 IEM_MC_FETCH_EFLAGS(EFlags);
9389 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9390 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9391
9392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9393 IEM_MC_COMMIT_EFLAGS(EFlags);
9394 IEM_MC_ADVANCE_RIP_AND_FINISH();
9395 IEM_MC_END();
9396 break;
9397
9398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9399 }
9400 }
9401}
9402
9403
9404/**
9405 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9406 */
9407FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9408{
9409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9410 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9411
9412 if (IEM_IS_MODRM_REG_MODE(bRm))
9413 {
9414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9415
9416 switch (pVCpu->iem.s.enmEffOpSize)
9417 {
9418 case IEMMODE_16BIT:
9419 IEM_MC_BEGIN(4, 0);
9420 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9421 IEM_MC_ARG(uint16_t, u16Src, 1);
9422 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9423 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9424
9425 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9426 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9427 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9428 IEM_MC_REF_EFLAGS(pEFlags);
9429 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9430
9431 IEM_MC_ADVANCE_RIP_AND_FINISH();
9432 IEM_MC_END();
9433 break;
9434
9435 case IEMMODE_32BIT:
9436 IEM_MC_BEGIN(4, 0);
9437 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9438 IEM_MC_ARG(uint32_t, u32Src, 1);
9439 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9440 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9441
9442 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9443 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9444 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9445 IEM_MC_REF_EFLAGS(pEFlags);
9446 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9447
9448 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9449 IEM_MC_ADVANCE_RIP_AND_FINISH();
9450 IEM_MC_END();
9451 break;
9452
9453 case IEMMODE_64BIT:
9454 IEM_MC_BEGIN(4, 0);
9455 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9456 IEM_MC_ARG(uint64_t, u64Src, 1);
9457 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9458 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9459
9460 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9461 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9462 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9463 IEM_MC_REF_EFLAGS(pEFlags);
9464 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9465
9466 IEM_MC_ADVANCE_RIP_AND_FINISH();
9467 IEM_MC_END();
9468 break;
9469
9470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9471 }
9472 }
9473 else
9474 {
9475 switch (pVCpu->iem.s.enmEffOpSize)
9476 {
9477 case IEMMODE_16BIT:
9478 IEM_MC_BEGIN(4, 2);
9479 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9480 IEM_MC_ARG(uint16_t, u16Src, 1);
9481 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9482 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9484
9485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9487 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9488 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9489 IEM_MC_FETCH_EFLAGS(EFlags);
9490 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9491 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9492
9493 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9494 IEM_MC_COMMIT_EFLAGS(EFlags);
9495 IEM_MC_ADVANCE_RIP_AND_FINISH();
9496 IEM_MC_END();
9497 break;
9498
9499 case IEMMODE_32BIT:
9500 IEM_MC_BEGIN(4, 2);
9501 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9502 IEM_MC_ARG(uint32_t, u32Src, 1);
9503 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9504 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9506
9507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9509 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9510 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9511 IEM_MC_FETCH_EFLAGS(EFlags);
9512 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9513 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9514
9515 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9516 IEM_MC_COMMIT_EFLAGS(EFlags);
9517 IEM_MC_ADVANCE_RIP_AND_FINISH();
9518 IEM_MC_END();
9519 break;
9520
9521 case IEMMODE_64BIT:
9522 IEM_MC_BEGIN(4, 2);
9523 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9524 IEM_MC_ARG(uint64_t, u64Src, 1);
9525 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9526 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9528
9529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9532 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9533 IEM_MC_FETCH_EFLAGS(EFlags);
9534 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9535 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9536
9537 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9538 IEM_MC_COMMIT_EFLAGS(EFlags);
9539 IEM_MC_ADVANCE_RIP_AND_FINISH();
9540 IEM_MC_END();
9541 break;
9542
9543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9544 }
9545 }
9546}
9547
9548
9549
9550/** Opcode 0x0f 0xa4. */
9551FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9552{
9553 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9554 IEMOP_HLP_MIN_386();
9555 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9556}
9557
9558
9559/** Opcode 0x0f 0xa5. */
9560FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9561{
9562 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9563 IEMOP_HLP_MIN_386();
9564 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9565}
9566
9567
9568/** Opcode 0x0f 0xa8. */
9569FNIEMOP_DEF(iemOp_push_gs)
9570{
9571 IEMOP_MNEMONIC(push_gs, "push gs");
9572 IEMOP_HLP_MIN_386();
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9574 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9575}
9576
9577
9578/** Opcode 0x0f 0xa9. */
9579FNIEMOP_DEF(iemOp_pop_gs)
9580{
9581 IEMOP_MNEMONIC(pop_gs, "pop gs");
9582 IEMOP_HLP_MIN_386();
9583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9584 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9585}
9586
9587
9588/** Opcode 0x0f 0xaa. */
9589FNIEMOP_DEF(iemOp_rsm)
9590{
9591 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9592 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9594 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9595 iemCImpl_rsm);
9596}
9597
9598
9599
9600/** Opcode 0x0f 0xab. */
9601FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9602{
9603 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9604 IEMOP_HLP_MIN_386();
9605 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
9606 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9607}
9608
9609
9610/** Opcode 0x0f 0xac. */
9611FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9612{
9613 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9614 IEMOP_HLP_MIN_386();
9615 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9616}
9617
9618
9619/** Opcode 0x0f 0xad. */
9620FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9621{
9622 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9623 IEMOP_HLP_MIN_386();
9624 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9625}
9626
9627
9628/** Opcode 0x0f 0xae mem/0. */
9629FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9630{
9631 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9632 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9633 IEMOP_RAISE_INVALID_OPCODE_RET();
9634
9635 IEM_MC_BEGIN(3, 1);
9636 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9637 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9638 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9641 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9642 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9643 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9644 IEM_MC_END();
9645}
9646
9647
9648/** Opcode 0x0f 0xae mem/1. */
9649FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9650{
9651 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9652 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9653 IEMOP_RAISE_INVALID_OPCODE_RET();
9654
9655 IEM_MC_BEGIN(3, 1);
9656 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9657 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9658 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9661 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9662 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9663 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9664 IEM_MC_END();
9665}
9666
9667
9668/**
9669 * @opmaps grp15
9670 * @opcode !11/2
9671 * @oppfx none
9672 * @opcpuid sse
9673 * @opgroup og_sse_mxcsrsm
9674 * @opxcpttype 5
9675 * @optest op1=0 -> mxcsr=0
9676 * @optest op1=0x2083 -> mxcsr=0x2083
9677 * @optest op1=0xfffffffe -> value.xcpt=0xd
9678 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9679 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9680 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9681 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9682 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9683 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9684 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9685 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9686 */
9687FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9688{
9689 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9690 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9691 IEMOP_RAISE_INVALID_OPCODE_RET();
9692
9693 IEM_MC_BEGIN(2, 0);
9694 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9695 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9698 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* LDMXCSR writes MXCSR. */
9699 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9700 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9701 IEM_MC_END();
9702}
9703
9704
9705/**
9706 * @opmaps grp15
9707 * @opcode !11/3
9708 * @oppfx none
9709 * @opcpuid sse
9710 * @opgroup og_sse_mxcsrsm
9711 * @opxcpttype 5
9712 * @optest mxcsr=0 -> op1=0
9713 * @optest mxcsr=0x2083 -> op1=0x2083
9714 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9715 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9716 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9717 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9718 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9719 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9720 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9721 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9722 */
9723FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9724{
9725 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9726 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9727 IEMOP_RAISE_INVALID_OPCODE_RET();
9728
9729 IEM_MC_BEGIN(2, 0);
9730 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9731 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9735 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9736 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9737 IEM_MC_END();
9738}
9739
9740
9741/**
9742 * @opmaps grp15
9743 * @opcode !11/4
9744 * @oppfx none
9745 * @opcpuid xsave
9746 * @opgroup og_system
9747 * @opxcpttype none
9748 */
9749FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9750{
9751 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9752 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9753 IEMOP_RAISE_INVALID_OPCODE_RET();
9754
9755 IEM_MC_BEGIN(3, 0);
9756 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9757 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9758 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9761 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9762 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9763 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9764 IEM_MC_END();
9765}
9766
9767
9768/**
9769 * @opmaps grp15
9770 * @opcode !11/5
9771 * @oppfx none
9772 * @opcpuid xsave
9773 * @opgroup og_system
9774 * @opxcpttype none
9775 */
9776FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9777{
9778 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9780 IEMOP_RAISE_INVALID_OPCODE_RET();
9781
9782 IEM_MC_BEGIN(3, 0);
9783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9784 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9785 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9788 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* XRSTOR loads new state, like FXRSTOR above. */
9789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9790 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9791 IEM_MC_END();
9792}
9793
9794/** Opcode 0x0f 0xae mem/6. */
9795FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9796
9797/**
9798 * @opmaps grp15
9799 * @opcode !11/7
9800 * @oppfx none
9801 * @opcpuid clfsh
9802 * @opgroup og_cachectl
9803 * @optest op1=1 ->
9804 */
9805FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9806{
9807 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9808 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9809 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9810
9811 IEM_MC_BEGIN(2, 0);
9812 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9813 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9817 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9818 IEM_MC_END();
9819}
9820
9821/**
9822 * @opmaps grp15
9823 * @opcode !11/7
9824 * @oppfx 0x66
9825 * @opcpuid clflushopt
9826 * @opgroup og_cachectl
9827 * @optest op1=1 ->
9828 */
9829FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9830{
9831 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9832 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9833 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9834
9835 IEM_MC_BEGIN(2, 0);
9836 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9837 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9840 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9841 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9842 IEM_MC_END();
9843}
9844
9845
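/*
 * The three memory fence encodings below use the corresponding host fence
 * instruction when the host has SSE2, with iemAImpl_alt_mem_fence supplying
 * an alternative serializing fallback for older hosts.
 */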
9846/** Opcode 0x0f 0xae 11b/5. */
9847FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9848{
9849 RT_NOREF_PV(bRm);
9850 IEMOP_MNEMONIC(lfence, "lfence");
9851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9852 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9853 IEMOP_RAISE_INVALID_OPCODE_RET();
9854
9855 IEM_MC_BEGIN(0, 0);
9856#ifdef RT_ARCH_ARM64
9857 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9858#else
9859 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9860 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9861 else
9862 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9863#endif
9864 IEM_MC_ADVANCE_RIP_AND_FINISH();
9865 IEM_MC_END();
9866}
9867
9868
9869/** Opcode 0x0f 0xae 11b/6. */
9870FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9871{
9872 RT_NOREF_PV(bRm);
9873 IEMOP_MNEMONIC(mfence, "mfence");
9874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9875 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9876 IEMOP_RAISE_INVALID_OPCODE_RET();
9877
9878 IEM_MC_BEGIN(0, 0);
9879#ifdef RT_ARCH_ARM64
9880 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9881#else
9882 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9883 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9884 else
9885 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9886#endif
9887 IEM_MC_ADVANCE_RIP_AND_FINISH();
9888 IEM_MC_END();
9889}
9890
9891
9892/** Opcode 0x0f 0xae 11b/7. */
9893FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9894{
9895 RT_NOREF_PV(bRm);
9896 IEMOP_MNEMONIC(sfence, "sfence");
9897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9898 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9899 IEMOP_RAISE_INVALID_OPCODE_RET();
9900
9901 IEM_MC_BEGIN(0, 0);
9902#ifdef RT_ARCH_ARM64
9903 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9904#else
9905 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9906 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9907 else
9908 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9909#endif
9910 IEM_MC_ADVANCE_RIP_AND_FINISH();
9911 IEM_MC_END();
9912}
9913
9914
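/*
 * FSGSBASE instructions: the 0xf3-prefixed register forms of grp15 /0../3.
 * These exist only in 64-bit mode; IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT covers
 * the CR4.FSGSBASE and mode checks.
 */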
9915/** Opcode 0xf3 0x0f 0xae 11b/0. */
9916FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9917{
9918 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9920 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9921 {
9922 IEM_MC_BEGIN(1, 0);
9923 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9924 IEM_MC_ARG(uint64_t, u64Dst, 0);
9925 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9926 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9927 IEM_MC_ADVANCE_RIP_AND_FINISH();
9928 IEM_MC_END();
9929 }
9930 else
9931 {
9932 IEM_MC_BEGIN(1, 0);
9933 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9934 IEM_MC_ARG(uint32_t, u32Dst, 0);
9935 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9936 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9937 IEM_MC_ADVANCE_RIP_AND_FINISH();
9938 IEM_MC_END();
9939 }
9940}
9941
9942
9943/** Opcode 0xf3 0x0f 0xae 11b/1. */
9944FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9945{
9946 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9948 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9949 {
9950 IEM_MC_BEGIN(1, 0);
9951 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9952 IEM_MC_ARG(uint64_t, u64Dst, 0);
9953 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9954 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9955 IEM_MC_ADVANCE_RIP_AND_FINISH();
9956 IEM_MC_END();
9957 }
9958 else
9959 {
9960 IEM_MC_BEGIN(1, 0);
9961 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9962 IEM_MC_ARG(uint32_t, u32Dst, 0);
9963 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9964 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9965 IEM_MC_ADVANCE_RIP_AND_FINISH();
9966 IEM_MC_END();
9967 }
9968}
9969
9970
9971/** Opcode 0xf3 0x0f 0xae 11b/2. */
9972FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9973{
9974 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9976 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9977 {
9978 IEM_MC_BEGIN(1, 0);
9979 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9980 IEM_MC_ARG(uint64_t, u64Dst, 0);
9981 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9982 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9983 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9984 IEM_MC_ADVANCE_RIP_AND_FINISH();
9985 IEM_MC_END();
9986 }
9987 else
9988 {
9989 IEM_MC_BEGIN(1, 0);
9990 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9991 IEM_MC_ARG(uint32_t, u32Dst, 0);
9992 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9993 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9994 IEM_MC_ADVANCE_RIP_AND_FINISH();
9995 IEM_MC_END();
9996 }
9997}
9998
9999
10000/** Opcode 0xf3 0x0f 0xae 11b/3. */
10001FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10002{
10003 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10005 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10006 {
10007 IEM_MC_BEGIN(1, 0);
10008 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10009 IEM_MC_ARG(uint64_t, u64Dst, 0);
10010 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10011 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10012 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10013 IEM_MC_ADVANCE_RIP_AND_FINISH();
10014 IEM_MC_END();
10015 }
10016 else
10017 {
10018 IEM_MC_BEGIN(1, 0);
10019 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10020 IEM_MC_ARG(uint32_t, u32Dst, 0);
10021 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10022 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10023 IEM_MC_ADVANCE_RIP_AND_FINISH();
10024 IEM_MC_END();
10025 }
10026}
10027
10028
10029/**
10030 * Group 15 jump table for register variant.
10031 */
10032IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10033{ /* pfx: none, 066h, 0f3h, 0f2h */
10034 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10035 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10036 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10037 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10038 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10039 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10040 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10041 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10042};
10043AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10044
10045
10046/**
10047 * Group 15 jump table for memory variant.
10048 */
10049IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10050{ /* pfx: none, 066h, 0f3h, 0f2h */
10051 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10052 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10053 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10054 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10055 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10056 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10057 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10058 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10059};
10060AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10061
10062
10063/** Opcode 0x0f 0xae. */
10064FNIEMOP_DEF(iemOp_Grp15)
10065{
10066 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
10067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
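 /* Dispatch on mod and /reg plus the last opcode prefix: both tables are
 laid out as 8 rows of 4 prefix columns (none, 066h, 0f3h, 0f2h), hence
 the 'reg * 4 + idxPrefix' indexing. */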
10068 if (IEM_IS_MODRM_REG_MODE(bRm))
10069 /* register, register */
10070 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10071 + pVCpu->iem.s.idxPrefix], bRm);
10072 /* memory, register */
10073 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10074 + pVCpu->iem.s.idxPrefix], bRm);
10075}
10076
10077
10078/** Opcode 0x0f 0xaf. */
10079FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10080{
10081 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10082 IEMOP_HLP_MIN_386();
10083 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10084 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10085 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10086}
10087
10088
10089/** Opcode 0x0f 0xb0. */
10090FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10091{
10092 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10093 IEMOP_HLP_MIN_486();
10094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10095
10096 if (IEM_IS_MODRM_REG_MODE(bRm))
10097 {
10098 IEMOP_HLP_DONE_DECODING();
10099 IEM_MC_BEGIN(4, 0);
10100 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10101 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10102 IEM_MC_ARG(uint8_t, u8Src, 2);
10103 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10104
10105 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10106 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10107 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10108 IEM_MC_REF_EFLAGS(pEFlags);
10109 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10110 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10111 else
10112 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10113
10114 IEM_MC_ADVANCE_RIP_AND_FINISH();
10115 IEM_MC_END();
10116 }
10117 else
10118 {
10119 IEM_MC_BEGIN(4, 3);
10120 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10121 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10122 IEM_MC_ARG(uint8_t, u8Src, 2);
10123 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10125 IEM_MC_LOCAL(uint8_t, u8Al);
10126
10127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10128 IEMOP_HLP_DONE_DECODING();
10129 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10130 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10131 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10132 IEM_MC_FETCH_EFLAGS(EFlags);
10133 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10136 else
10137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10138
10139 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10140 IEM_MC_COMMIT_EFLAGS(EFlags);
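 /* AL is written back unconditionally: on failure the helper has loaded
 the local copy with the old destination value, on success it is left
 unchanged, so the store is correct either way. */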
10141 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10142 IEM_MC_ADVANCE_RIP_AND_FINISH();
10143 IEM_MC_END();
10144 }
10145}
10146
10147/** Opcode 0x0f 0xb1. */
10148FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10149{
10150 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10151 IEMOP_HLP_MIN_486();
10152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10153
10154 if (IEM_IS_MODRM_REG_MODE(bRm))
10155 {
10156 IEMOP_HLP_DONE_DECODING();
10157 switch (pVCpu->iem.s.enmEffOpSize)
10158 {
10159 case IEMMODE_16BIT:
10160 IEM_MC_BEGIN(4, 0);
10161 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10162 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10163 IEM_MC_ARG(uint16_t, u16Src, 2);
10164 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10165
10166 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10167 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10168 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10169 IEM_MC_REF_EFLAGS(pEFlags);
10170 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10171 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10172 else
10173 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10174
10175 IEM_MC_ADVANCE_RIP_AND_FINISH();
10176 IEM_MC_END();
10177 break;
10178
10179 case IEMMODE_32BIT:
10180 IEM_MC_BEGIN(4, 0);
10181 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10182 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10183 IEM_MC_ARG(uint32_t, u32Src, 2);
10184 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10185
10186 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10187 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10188 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10189 IEM_MC_REF_EFLAGS(pEFlags);
10190 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10191 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10192 else
10193 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10194
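 /* Only the 32-bit register actually written gets its upper half
 cleared: the destination on success (ZF=1), EAX on failure. */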
10195 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10196 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10197 } IEM_MC_ELSE() {
10198 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10199 } IEM_MC_ENDIF();
10200
10201 IEM_MC_ADVANCE_RIP_AND_FINISH();
10202 IEM_MC_END();
10203 break;
10204
10205 case IEMMODE_64BIT:
10206 IEM_MC_BEGIN(4, 0);
10207 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10208 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10209#ifdef RT_ARCH_X86
10210 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10211#else
10212 IEM_MC_ARG(uint64_t, u64Src, 2);
10213#endif
10214 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10215
10216 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10217 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10218 IEM_MC_REF_EFLAGS(pEFlags);
10219#ifdef RT_ARCH_X86
10220 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10221 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10222 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10223 else
10224 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10225#else
10226 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10227 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10228 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10229 else
10230 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10231#endif
10232
10233 IEM_MC_ADVANCE_RIP_AND_FINISH();
10234 IEM_MC_END();
10235 break;
10236
10237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10238 }
10239 }
10240 else
10241 {
10242 switch (pVCpu->iem.s.enmEffOpSize)
10243 {
10244 case IEMMODE_16BIT:
10245 IEM_MC_BEGIN(4, 3);
10246 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10247 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10248 IEM_MC_ARG(uint16_t, u16Src, 2);
10249 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10251 IEM_MC_LOCAL(uint16_t, u16Ax);
10252
10253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10254 IEMOP_HLP_DONE_DECODING();
10255 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10256 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10257 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10258 IEM_MC_FETCH_EFLAGS(EFlags);
10259 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10262 else
10263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10264
10265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10266 IEM_MC_COMMIT_EFLAGS(EFlags);
10267 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10268 IEM_MC_ADVANCE_RIP_AND_FINISH();
10269 IEM_MC_END();
10270 break;
10271
10272 case IEMMODE_32BIT:
10273 IEM_MC_BEGIN(4, 3);
10274 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10275 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10276 IEM_MC_ARG(uint32_t, u32Src, 2);
10277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10279 IEM_MC_LOCAL(uint32_t, u32Eax);
10280
10281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10282 IEMOP_HLP_DONE_DECODING();
10283 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10284 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10285 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10286 IEM_MC_FETCH_EFLAGS(EFlags);
10287 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10288 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10289 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10290 else
10291 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10292
10293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10294 IEM_MC_COMMIT_EFLAGS(EFlags);
10295
10296 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10297 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10298 } IEM_MC_ENDIF();
10299
10300 IEM_MC_ADVANCE_RIP_AND_FINISH();
10301 IEM_MC_END();
10302 break;
10303
10304 case IEMMODE_64BIT:
10305 IEM_MC_BEGIN(4, 3);
10306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10307 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10308#ifdef RT_ARCH_X86
10309 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10310#else
10311 IEM_MC_ARG(uint64_t, u64Src, 2);
10312#endif
10313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10315 IEM_MC_LOCAL(uint64_t, u64Rax);
10316
10317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10318 IEMOP_HLP_DONE_DECODING();
10319 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10320 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10321 IEM_MC_FETCH_EFLAGS(EFlags);
10322 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10323#ifdef RT_ARCH_X86
10324 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10325 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10326 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10327 else
10328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10329#else
10330 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10332 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10333 else
10334 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10335#endif
10336
10337 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10338 IEM_MC_COMMIT_EFLAGS(EFlags);
10339 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10340 IEM_MC_ADVANCE_RIP_AND_FINISH();
10341 IEM_MC_END();
10342 break;
10343
10344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10345 }
10346 }
10347}
10348
10349
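/**
 * Common worker for lss, lfs and lgs: loads a far pointer (Mp: offset
 * followed by a 16-bit selector) from memory into a segment register and a
 * general register.
 */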
10350FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10351{
10352 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10353 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10354
10355 switch (pVCpu->iem.s.enmEffOpSize)
10356 {
10357 case IEMMODE_16BIT:
10358 IEM_MC_BEGIN(5, 1);
10359 IEM_MC_ARG(uint16_t, uSel, 0);
10360 IEM_MC_ARG(uint16_t, offSeg, 1);
10361 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10362 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10364 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10367 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10368 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10369 if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
10370 IEM_MC_CALL_CIMPL_5( 0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10371 else
10372 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_MODE, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10373 IEM_MC_END();
10374
10375 case IEMMODE_32BIT:
10376 IEM_MC_BEGIN(5, 1);
10377 IEM_MC_ARG(uint16_t, uSel, 0);
10378 IEM_MC_ARG(uint32_t, offSeg, 1);
10379 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10380 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10381 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10382 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10385 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10386 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10387 if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
10388 IEM_MC_CALL_CIMPL_5( 0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10389 else
10390 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_MODE, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10391 IEM_MC_END();
10392
10393 case IEMMODE_64BIT:
10394 IEM_MC_BEGIN(5, 1);
10395 IEM_MC_ARG(uint16_t, uSel, 0);
10396 IEM_MC_ARG(uint64_t, offSeg, 1);
10397 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10398 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10399 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10400 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10403 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10404 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10405 else
10406 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10407 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10408 IEM_MC_CALL_CIMPL_5(0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10409 IEM_MC_END();
10410
10411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10412 }
10413}
10414
10415
10416/** Opcode 0x0f 0xb2. */
10417FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10418{
10419 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10420 IEMOP_HLP_MIN_386();
10421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10422 if (IEM_IS_MODRM_REG_MODE(bRm))
10423 IEMOP_RAISE_INVALID_OPCODE_RET();
10424 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10425}
10426
10427
10428/** Opcode 0x0f 0xb3. */
10429FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10430{
10431 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10432 IEMOP_HLP_MIN_386();
10433 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10434 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10435}
10436
10437
10438/** Opcode 0x0f 0xb4. */
10439FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10440{
10441 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10442 IEMOP_HLP_MIN_386();
10443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10444 if (IEM_IS_MODRM_REG_MODE(bRm))
10445 IEMOP_RAISE_INVALID_OPCODE_RET();
10446 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10447}
10448
10449
10450/** Opcode 0x0f 0xb5. */
10451FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10452{
10453 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10454 IEMOP_HLP_MIN_386();
10455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10456 if (IEM_IS_MODRM_REG_MODE(bRm))
10457 IEMOP_RAISE_INVALID_OPCODE_RET();
10458 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10459}
10460
10461
10462/** Opcode 0x0f 0xb6. */
10463FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10464{
10465 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10466 IEMOP_HLP_MIN_386();
10467
10468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10469
10470 /*
10471 * If rm is denoting a register, no more instruction bytes.
10472 */
10473 if (IEM_IS_MODRM_REG_MODE(bRm))
10474 {
10475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10476 switch (pVCpu->iem.s.enmEffOpSize)
10477 {
10478 case IEMMODE_16BIT:
10479 IEM_MC_BEGIN(0, 1);
10480 IEM_MC_LOCAL(uint16_t, u16Value);
10481 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10482 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10483 IEM_MC_ADVANCE_RIP_AND_FINISH();
10484 IEM_MC_END();
10485 break;
10486
10487 case IEMMODE_32BIT:
10488 IEM_MC_BEGIN(0, 1);
10489 IEM_MC_LOCAL(uint32_t, u32Value);
10490 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10491 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10492 IEM_MC_ADVANCE_RIP_AND_FINISH();
10493 IEM_MC_END();
10494 break;
10495
10496 case IEMMODE_64BIT:
10497 IEM_MC_BEGIN(0, 1);
10498 IEM_MC_LOCAL(uint64_t, u64Value);
10499 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10500 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10501 IEM_MC_ADVANCE_RIP_AND_FINISH();
10502 IEM_MC_END();
10503 break;
10504
10505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10506 }
10507 }
10508 else
10509 {
10510 /*
10511 * We're loading a register from memory.
10512 */
10513 switch (pVCpu->iem.s.enmEffOpSize)
10514 {
10515 case IEMMODE_16BIT:
10516 IEM_MC_BEGIN(0, 2);
10517 IEM_MC_LOCAL(uint16_t, u16Value);
10518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10521 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10522 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10523 IEM_MC_ADVANCE_RIP_AND_FINISH();
10524 IEM_MC_END();
10525 break;
10526
10527 case IEMMODE_32BIT:
10528 IEM_MC_BEGIN(0, 2);
10529 IEM_MC_LOCAL(uint32_t, u32Value);
10530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10533 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10534 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10535 IEM_MC_ADVANCE_RIP_AND_FINISH();
10536 IEM_MC_END();
10537 break;
10538
10539 case IEMMODE_64BIT:
10540 IEM_MC_BEGIN(0, 2);
10541 IEM_MC_LOCAL(uint64_t, u64Value);
10542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10545 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10546 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10547 IEM_MC_ADVANCE_RIP_AND_FINISH();
10548 IEM_MC_END();
10549 break;
10550
10551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10552 }
10553 }
10554}
10555
10556
10557/** Opcode 0x0f 0xb7. */
10558FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10559{
10560 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10561 IEMOP_HLP_MIN_386();
10562
10563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10564
10565 /** @todo Not entirely sure how the operand size prefix is handled here,
10566 * assuming that it will be ignored. Would be nice to have a few
10567 * tests for this. */
10568 /*
10569 * If rm is denoting a register, no more instruction bytes.
10570 */
10571 if (IEM_IS_MODRM_REG_MODE(bRm))
10572 {
10573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10574 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10575 {
10576 IEM_MC_BEGIN(0, 1);
10577 IEM_MC_LOCAL(uint32_t, u32Value);
10578 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10579 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10580 IEM_MC_ADVANCE_RIP_AND_FINISH();
10581 IEM_MC_END();
10582 }
10583 else
10584 {
10585 IEM_MC_BEGIN(0, 1);
10586 IEM_MC_LOCAL(uint64_t, u64Value);
10587 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10588 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10589 IEM_MC_ADVANCE_RIP_AND_FINISH();
10590 IEM_MC_END();
10591 }
10592 }
10593 else
10594 {
10595 /*
10596 * We're loading a register from memory.
10597 */
10598 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10599 {
10600 IEM_MC_BEGIN(0, 2);
10601 IEM_MC_LOCAL(uint32_t, u32Value);
10602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10605 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10606 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10607 IEM_MC_ADVANCE_RIP_AND_FINISH();
10608 IEM_MC_END();
10609 }
10610 else
10611 {
10612 IEM_MC_BEGIN(0, 2);
10613 IEM_MC_LOCAL(uint64_t, u64Value);
10614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10617 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10618 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10619 IEM_MC_ADVANCE_RIP_AND_FINISH();
10620 IEM_MC_END();
10621 }
10622 }
10623}
10624
10625
10626/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10627FNIEMOP_UD_STUB(iemOp_jmpe);
10628
10629
10630/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10631FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10632{
10633 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10634 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10635 return iemOp_InvalidNeedRM(pVCpu);
10636#ifndef TST_IEM_CHECK_MC
10637# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10638 static const IEMOPBINSIZES s_Native =
10639 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10640# endif
10641 static const IEMOPBINSIZES s_Fallback =
10642 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10643#endif
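 /* Assumption: IEM_SELECT_HOST_OR_FALLBACK picks s_Native only when the host
    CPU itself has POPCNT, falling back to the C implementations otherwise. */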
10644 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10645 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10646}
10647
10648
10649/**
10650 * @opcode 0xb9
10651 * @opinvalid intel-modrm
10652 * @optest ->
10653 */
10654FNIEMOP_DEF(iemOp_Grp10)
10655{
10656 /*
10657 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also
10658 * decodes the ModR/M byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10659 */
10660 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10661 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10662 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10663}
10664
10665
10666/**
10667 * Body for the group 8 immediate bit instructions (BT/BTS/BTR/BTC Ev,Ib).
10668 */
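/* Example: with a 16-bit operand, 'bt word [mem], 17' masks the immediate to
   the operand width and thus tests bit 17 & 0x0f = 1; unlike the Gv (register
   bit offset) forms, the immediate forms below can never address memory
   outside the operand itself. */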
10669#define IEMOP_BODY_BIT_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
10670 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10671 \
10672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10673 { \
10674 /* register destination. */ \
10675 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10677 \
10678 switch (pVCpu->iem.s.enmEffOpSize) \
10679 { \
10680 case IEMMODE_16BIT: \
10681 IEM_MC_BEGIN(3, 0); \
10682 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10683 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10684 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10685 \
10686 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10687 IEM_MC_REF_EFLAGS(pEFlags); \
10688 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10689 \
10690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10691 IEM_MC_END(); \
10692 break; \
10693 \
10694 case IEMMODE_32BIT: \
10695 IEM_MC_BEGIN(3, 0); \
10696 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10697 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10698 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10699 \
10700 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10701 IEM_MC_REF_EFLAGS(pEFlags); \
10702 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10703 \
10704 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10705 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10706 IEM_MC_END(); \
10707 break; \
10708 \
10709 case IEMMODE_64BIT: \
10710 IEM_MC_BEGIN(3, 0); \
10711 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10712 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10713 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10714 \
10715 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10716 IEM_MC_REF_EFLAGS(pEFlags); \
10717 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10718 \
10719 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10720 IEM_MC_END(); \
10721 break; \
10722 \
10723 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10724 } \
10725 } \
10726 else \
10727 { \
10728 /* memory destination. */ \
10729 /** @todo test negative bit offsets! */ \
10730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10731 { \
10732 switch (pVCpu->iem.s.enmEffOpSize) \
10733 { \
10734 case IEMMODE_16BIT: \
10735 IEM_MC_BEGIN(3, 1); \
10736 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10737 IEM_MC_ARG(uint16_t, u16Src, 1); \
10738 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10740 \
10741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10742 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10743 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10744 IEMOP_HLP_DONE_DECODING(); \
10745 IEM_MC_FETCH_EFLAGS(EFlags); \
10746 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10747 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10748 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
10749 \
10750 IEM_MC_COMMIT_EFLAGS(EFlags); \
10751 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10752 IEM_MC_END(); \
10753 break; \
10754 \
10755 case IEMMODE_32BIT: \
10756 IEM_MC_BEGIN(3, 1); \
10757 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10758 IEM_MC_ARG(uint32_t, u32Src, 1); \
10759 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10761 \
10762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10763 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10764 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10765 IEMOP_HLP_DONE_DECODING(); \
10766 IEM_MC_FETCH_EFLAGS(EFlags); \
10767 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10768 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10769 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
10770 \
10771 IEM_MC_COMMIT_EFLAGS(EFlags); \
10772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10773 IEM_MC_END(); \
10774 break; \
10775 \
10776 case IEMMODE_64BIT: \
10777 IEM_MC_BEGIN(3, 1); \
10778 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10779 IEM_MC_ARG(uint64_t, u64Src, 1); \
10780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10782 \
10783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10784 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10785 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10786 IEMOP_HLP_DONE_DECODING(); \
10787 IEM_MC_FETCH_EFLAGS(EFlags); \
10788 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10789 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
10791 \
10792 IEM_MC_COMMIT_EFLAGS(EFlags); \
10793 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10794 IEM_MC_END(); \
10795 break; \
10796 \
10797 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10798 } \
10799 } \
10800 else \
10801 { \
10802 (void)0
10803
10804#define IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() \
10805 IEMOP_HLP_DONE_DECODING(); \
10806 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
10807 } \
10808 } \
10809 (void)0
10810
10811#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10812 switch (pVCpu->iem.s.enmEffOpSize) \
10813 { \
10814 case IEMMODE_16BIT: \
10815 IEM_MC_BEGIN(3, 1); \
10816 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10817 IEM_MC_ARG(uint16_t, u16Src, 1); \
10818 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10820 \
10821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10822 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10823 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10824 IEMOP_HLP_DONE_DECODING(); \
10825 IEM_MC_FETCH_EFLAGS(EFlags); \
10826 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10827 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10828 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
10829 \
10830 IEM_MC_COMMIT_EFLAGS(EFlags); \
10831 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10832 IEM_MC_END(); \
10833 break; \
10834 \
10835 case IEMMODE_32BIT: \
10836 IEM_MC_BEGIN(3, 1); \
10837 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10838 IEM_MC_ARG(uint32_t, u32Src, 1); \
10839 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10841 \
10842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10843 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10844 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10845 IEMOP_HLP_DONE_DECODING(); \
10846 IEM_MC_FETCH_EFLAGS(EFlags); \
10847 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10848 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10849 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
10850 \
10851 IEM_MC_COMMIT_EFLAGS(EFlags); \
10852 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10853 IEM_MC_END(); \
10854 break; \
10855 \
10856 case IEMMODE_64BIT: \
10857 IEM_MC_BEGIN(3, 1); \
10858 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10859 IEM_MC_ARG(uint64_t, u64Src, 1); \
10860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10862 \
10863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10864 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10865 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10866 IEMOP_HLP_DONE_DECODING(); \
10867 IEM_MC_FETCH_EFLAGS(EFlags); \
10868 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10869 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
10871 \
10872 IEM_MC_COMMIT_EFLAGS(EFlags); \
10873 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10874 IEM_MC_END(); \
10875 break; \
10876 \
10877 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10878 } \
10879 } \
10880 } \
10881 (void)0
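
/* Note: IEMOP_BODY_BIT_Ev_Ib leaves the LOCK-prefix 'else' branch open; each
   user must close it with exactly one of IEMOP_BODY_BIT_Ev_Ib_NO_LOCK or
   IEMOP_BODY_BIT_Ev_Ib_LOCKED, as the group 8 workers below do. */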
10882
10883
10884/** Opcode 0x0f 0xba /4. */
10885FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10886{
10887 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10888 IEMOP_BODY_BIT_Ev_Ib(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
10889 IEMOP_BODY_BIT_Ev_Ib_NO_LOCK();
10890}
10891
10892
10893/** Opcode 0x0f 0xba /5. */
10894FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
10895{
10896 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
10897 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
10898 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
10899}
10900
10901
10902/** Opcode 0x0f 0xba /6. */
10903FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
10904{
10905 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
10906 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10907 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10908}
10909
10910
10911/** Opcode 0x0f 0xba /7. */
10912FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
10913{
10914 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
10915 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10916 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10917}
10918
10919
10920/** Opcode 0x0f 0xba. */
10921FNIEMOP_DEF(iemOp_Grp8)
10922{
10923 IEMOP_HLP_MIN_386();
10924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10925 switch (IEM_GET_MODRM_REG_8(bRm))
10926 {
10927 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
10928 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
10929 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
10930 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
10931
10932 case 0: case 1: case 2: case 3:
10933 /* Both AMD and Intel want full modr/m decoding and imm8. */
10934 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10935
10936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10937 }
10938}
10939
10940
10941/** Opcode 0x0f 0xbb. */
10942FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10943{
10944 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10945 IEMOP_HLP_MIN_386();
10946 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10947 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10948}
10949
10950
10951/**
10952 * Common worker for BSF and BSR instructions.
10953 *
10954 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10955 * the destination register, which means that for 32-bit operations the high
10956 * bits must be left alone.
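 *
 * E.g. a 32-bit BSF with a zero source sets ZF and, in the behaviour modelled
 * here, leaves the destination (including its high dword) untouched, which is
 * why the high dword is only cleared when ZF ends up clear.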
10957 *
10958 * @param pImpl Pointer to the instruction implementation (assembly).
10959 */
10960FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10961{
10962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10963
10964 /*
10965 * If rm is denoting a register, no more instruction bytes.
10966 */
10967 if (IEM_IS_MODRM_REG_MODE(bRm))
10968 {
10969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10970 switch (pVCpu->iem.s.enmEffOpSize)
10971 {
10972 case IEMMODE_16BIT:
10973 IEM_MC_BEGIN(3, 0);
10974 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10975 IEM_MC_ARG(uint16_t, u16Src, 1);
10976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10977
10978 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10979 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10980 IEM_MC_REF_EFLAGS(pEFlags);
10981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10982
10983 IEM_MC_ADVANCE_RIP_AND_FINISH();
10984 IEM_MC_END();
10985 break;
10986
10987 case IEMMODE_32BIT:
10988 IEM_MC_BEGIN(3, 0);
10989 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10990 IEM_MC_ARG(uint32_t, u32Src, 1);
10991 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10992
10993 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10994 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10995 IEM_MC_REF_EFLAGS(pEFlags);
10996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10997 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10998 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10999 } IEM_MC_ENDIF();
11000 IEM_MC_ADVANCE_RIP_AND_FINISH();
11001 IEM_MC_END();
11002 break;
11003
11004 case IEMMODE_64BIT:
11005 IEM_MC_BEGIN(3, 0);
11006 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11007 IEM_MC_ARG(uint64_t, u64Src, 1);
11008 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11009
11010 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11011 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11012 IEM_MC_REF_EFLAGS(pEFlags);
11013 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11014
11015 IEM_MC_ADVANCE_RIP_AND_FINISH();
11016 IEM_MC_END();
11017 break;
11018
11019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11020 }
11021 }
11022 else
11023 {
11024 /*
11025 * We're accessing memory.
11026 */
11027 switch (pVCpu->iem.s.enmEffOpSize)
11028 {
11029 case IEMMODE_16BIT:
11030 IEM_MC_BEGIN(3, 1);
11031 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11032 IEM_MC_ARG(uint16_t, u16Src, 1);
11033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11035
11036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11038 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11039 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11040 IEM_MC_REF_EFLAGS(pEFlags);
11041 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11042
11043 IEM_MC_ADVANCE_RIP_AND_FINISH();
11044 IEM_MC_END();
11045 break;
11046
11047 case IEMMODE_32BIT:
11048 IEM_MC_BEGIN(3, 1);
11049 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11050 IEM_MC_ARG(uint32_t, u32Src, 1);
11051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11053
11054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11056 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11057 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11058 IEM_MC_REF_EFLAGS(pEFlags);
11059 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11060
11061 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11062 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11063 } IEM_MC_ENDIF();
11064 IEM_MC_ADVANCE_RIP_AND_FINISH();
11065 IEM_MC_END();
11066 break;
11067
11068 case IEMMODE_64BIT:
11069 IEM_MC_BEGIN(3, 1);
11070 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11071 IEM_MC_ARG(uint64_t, u64Src, 1);
11072 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11074
11075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11077 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11078 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11079 IEM_MC_REF_EFLAGS(pEFlags);
11080 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11081
11082 IEM_MC_ADVANCE_RIP_AND_FINISH();
11083 IEM_MC_END();
11084 break;
11085
11086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11087 }
11088 }
11089}
11090
11091
11092/** Opcode 0x0f 0xbc. */
11093FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11094{
11095 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11096 IEMOP_HLP_MIN_386();
11097 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11098 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11099}
11100
11101
11102/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11103FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11104{
11105 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11106 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
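    /* Without BMI1 the F3 prefix is ignored and the instruction executes as
       plain BSF on real hardware, which the fallback above mirrors. */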
11107 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11108
11109#ifndef TST_IEM_CHECK_MC
11110 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11111 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11112 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11113 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11114 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11115 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11116 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11117 {
11118 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11119 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11120 };
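    /* Assumption: the outer index is the host BMI1 capability passed to
       IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX below, the inner one the guest
       EFLAGS-behaviour style (native/intel/amd). */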
11121#endif
11122 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11123 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11124 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11125 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11126}
11127
11128
11129/** Opcode 0x0f 0xbd. */
11130FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11131{
11132 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11133 IEMOP_HLP_MIN_386();
11134 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11135 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11136}
11137
11138
11139/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11140FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11141{
11142 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11143 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11144 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11145
11146#ifndef TST_IEM_CHECK_MC
11147 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11148 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11149 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11150 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11151 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11152 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11153 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11154 {
11155 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11156 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11157 };
11158#endif
11159 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11160 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11161 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11162 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11163}
11164
11165
11166
11167/** Opcode 0x0f 0xbe. */
11168FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11169{
11170 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11171 IEMOP_HLP_MIN_386();
11172
11173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11174
11175 /*
11176 * If rm is denoting a register, no more instruction bytes.
11177 */
11178 if (IEM_IS_MODRM_REG_MODE(bRm))
11179 {
11180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11181 switch (pVCpu->iem.s.enmEffOpSize)
11182 {
11183 case IEMMODE_16BIT:
11184 IEM_MC_BEGIN(0, 1);
11185 IEM_MC_LOCAL(uint16_t, u16Value);
11186 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11187 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11188 IEM_MC_ADVANCE_RIP_AND_FINISH();
11189 IEM_MC_END();
11190 break;
11191
11192 case IEMMODE_32BIT:
11193 IEM_MC_BEGIN(0, 1);
11194 IEM_MC_LOCAL(uint32_t, u32Value);
11195 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11196 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11197 IEM_MC_ADVANCE_RIP_AND_FINISH();
11198 IEM_MC_END();
11199 break;
11200
11201 case IEMMODE_64BIT:
11202 IEM_MC_BEGIN(0, 1);
11203 IEM_MC_LOCAL(uint64_t, u64Value);
11204 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11205 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11206 IEM_MC_ADVANCE_RIP_AND_FINISH();
11207 IEM_MC_END();
11208 break;
11209
11210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11211 }
11212 }
11213 else
11214 {
11215 /*
11216 * We're loading a register from memory.
11217 */
11218 switch (pVCpu->iem.s.enmEffOpSize)
11219 {
11220 case IEMMODE_16BIT:
11221 IEM_MC_BEGIN(0, 2);
11222 IEM_MC_LOCAL(uint16_t, u16Value);
11223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11226 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11227 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11228 IEM_MC_ADVANCE_RIP_AND_FINISH();
11229 IEM_MC_END();
11230 break;
11231
11232 case IEMMODE_32BIT:
11233 IEM_MC_BEGIN(0, 2);
11234 IEM_MC_LOCAL(uint32_t, u32Value);
11235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11238 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11239 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11240 IEM_MC_ADVANCE_RIP_AND_FINISH();
11241 IEM_MC_END();
11242 break;
11243
11244 case IEMMODE_64BIT:
11245 IEM_MC_BEGIN(0, 2);
11246 IEM_MC_LOCAL(uint64_t, u64Value);
11247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11250 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11251 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11252 IEM_MC_ADVANCE_RIP_AND_FINISH();
11253 IEM_MC_END();
11254 break;
11255
11256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11257 }
11258 }
11259}
11260
11261
11262/** Opcode 0x0f 0xbf. */
11263FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11264{
11265 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11266 IEMOP_HLP_MIN_386();
11267
11268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11269
11270 /** @todo Not entirely sure how the operand size prefix is handled here,
11271 * assuming that it will be ignored. Would be nice to have a few
11272 * tests for this. */
11273 /*
11274 * If rm is denoting a register, no more instruction bytes.
11275 */
11276 if (IEM_IS_MODRM_REG_MODE(bRm))
11277 {
11278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11279 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11280 {
11281 IEM_MC_BEGIN(0, 1);
11282 IEM_MC_LOCAL(uint32_t, u32Value);
11283 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11284 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11285 IEM_MC_ADVANCE_RIP_AND_FINISH();
11286 IEM_MC_END();
11287 }
11288 else
11289 {
11290 IEM_MC_BEGIN(0, 1);
11291 IEM_MC_LOCAL(uint64_t, u64Value);
11292 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11293 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11294 IEM_MC_ADVANCE_RIP_AND_FINISH();
11295 IEM_MC_END();
11296 }
11297 }
11298 else
11299 {
11300 /*
11301 * We're loading a register from memory.
11302 */
11303 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11304 {
11305 IEM_MC_BEGIN(0, 2);
11306 IEM_MC_LOCAL(uint32_t, u32Value);
11307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11310 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11311 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11312 IEM_MC_ADVANCE_RIP_AND_FINISH();
11313 IEM_MC_END();
11314 }
11315 else
11316 {
11317 IEM_MC_BEGIN(0, 2);
11318 IEM_MC_LOCAL(uint64_t, u64Value);
11319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11322 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11323 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11324 IEM_MC_ADVANCE_RIP_AND_FINISH();
11325 IEM_MC_END();
11326 }
11327 }
11328}
11329
11330
11331/** Opcode 0x0f 0xc0. */
11332FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11333{
11334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11335 IEMOP_HLP_MIN_486();
11336 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11337
11338 /*
11339 * If rm is denoting a register, no more instruction bytes.
11340 */
11341 if (IEM_IS_MODRM_REG_MODE(bRm))
11342 {
11343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11344
11345 IEM_MC_BEGIN(3, 0);
11346 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11347 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11348 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11349
11350 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11351 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11352 IEM_MC_REF_EFLAGS(pEFlags);
11353 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11354
11355 IEM_MC_ADVANCE_RIP_AND_FINISH();
11356 IEM_MC_END();
11357 }
11358 else
11359 {
11360 /*
11361 * We're accessing memory.
11362 */
11363 IEM_MC_BEGIN(3, 3);
11364 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11365 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11366 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11367 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11369
11370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11371 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11372 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11373 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
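        /* The worker exchanges through a stack copy of the register so that
           the guest register is only updated after the memory operand has
           been committed successfully (see the store below). */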
11374 IEM_MC_FETCH_EFLAGS(EFlags);
11375 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11376 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11377 else
11378 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11379
11380 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11381 IEM_MC_COMMIT_EFLAGS(EFlags);
11382 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11383 IEM_MC_ADVANCE_RIP_AND_FINISH();
11384 IEM_MC_END();
11385 }
11386}
11387
11388
11389/** Opcode 0x0f 0xc1. */
11390FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11391{
11392 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11393 IEMOP_HLP_MIN_486();
11394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11395
11396 /*
11397 * If rm is denoting a register, no more instruction bytes.
11398 */
11399 if (IEM_IS_MODRM_REG_MODE(bRm))
11400 {
11401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11402
11403 switch (pVCpu->iem.s.enmEffOpSize)
11404 {
11405 case IEMMODE_16BIT:
11406 IEM_MC_BEGIN(3, 0);
11407 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11408 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11409 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11410
11411 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11412 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11413 IEM_MC_REF_EFLAGS(pEFlags);
11414 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11415
11416 IEM_MC_ADVANCE_RIP_AND_FINISH();
11417 IEM_MC_END();
11418 break;
11419
11420 case IEMMODE_32BIT:
11421 IEM_MC_BEGIN(3, 0);
11422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11423 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11424 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11425
11426 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11427 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11428 IEM_MC_REF_EFLAGS(pEFlags);
11429 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11430
11431 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11432 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11433 IEM_MC_ADVANCE_RIP_AND_FINISH();
11434 IEM_MC_END();
11435 break;
11436
11437 case IEMMODE_64BIT:
11438 IEM_MC_BEGIN(3, 0);
11439 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11440 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11441 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11442
11443 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11444 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11445 IEM_MC_REF_EFLAGS(pEFlags);
11446 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11447
11448 IEM_MC_ADVANCE_RIP_AND_FINISH();
11449 IEM_MC_END();
11450 break;
11451
11452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11453 }
11454 }
11455 else
11456 {
11457 /*
11458 * We're accessing memory.
11459 */
11460 switch (pVCpu->iem.s.enmEffOpSize)
11461 {
11462 case IEMMODE_16BIT:
11463 IEM_MC_BEGIN(3, 3);
11464 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11465 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11466 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11467 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11469
11470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11471 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11472 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11473 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11474 IEM_MC_FETCH_EFLAGS(EFlags);
11475 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11476 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11477 else
11478 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11479
11480 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11481 IEM_MC_COMMIT_EFLAGS(EFlags);
11482 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11483 IEM_MC_ADVANCE_RIP_AND_FINISH();
11484 IEM_MC_END();
11485 break;
11486
11487 case IEMMODE_32BIT:
11488 IEM_MC_BEGIN(3, 3);
11489 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11490 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11491 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11492 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11494
11495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11496 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11497 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11498 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11499 IEM_MC_FETCH_EFLAGS(EFlags);
11500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11501 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11502 else
11503 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11504
11505 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11506 IEM_MC_COMMIT_EFLAGS(EFlags);
11507 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11508 IEM_MC_ADVANCE_RIP_AND_FINISH();
11509 IEM_MC_END();
11510 break;
11511
11512 case IEMMODE_64BIT:
11513 IEM_MC_BEGIN(3, 3);
11514 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11515 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11516 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11517 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11519
11520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11521 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11522 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11523 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11524 IEM_MC_FETCH_EFLAGS(EFlags);
11525 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11526 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11527 else
11528 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11529
11530 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11531 IEM_MC_COMMIT_EFLAGS(EFlags);
11532 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11533 IEM_MC_ADVANCE_RIP_AND_FINISH();
11534 IEM_MC_END();
11535 break;
11536
11537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11538 }
11539 }
11540}
11541
11542
11543/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11544FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11545{
11546 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
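    /* The imm8 selects the compare predicate: 0=EQ, 1=LT, 2=LE, 3=UNORD,
       4=NEQ, 5=NLT, 6=NLE, 7=ORD; the same encoding applies to the cmppd,
       cmpss and cmpsd variants below. */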
11547
11548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11549 if (IEM_IS_MODRM_REG_MODE(bRm))
11550 {
11551 /*
11552 * XMM, XMM.
11553 */
11554 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11556 IEM_MC_BEGIN(4, 2);
11557 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11558 IEM_MC_LOCAL(X86XMMREG, Dst);
11559 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11560 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11561 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11562 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11564 IEM_MC_PREPARE_SSE_USAGE();
11565 IEM_MC_REF_MXCSR(pfMxcsr);
11566 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11567 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11568 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11569 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11570 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11571 } IEM_MC_ELSE() {
11572 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11573 } IEM_MC_ENDIF();
11574
11575 IEM_MC_ADVANCE_RIP_AND_FINISH();
11576 IEM_MC_END();
11577 }
11578 else
11579 {
11580 /*
11581 * XMM, [mem128].
11582 */
11583 IEM_MC_BEGIN(4, 3);
11584 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11585 IEM_MC_LOCAL(X86XMMREG, Dst);
11586 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11587 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11588 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11590
11591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11592 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11593 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11595 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11596 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11597
11598 IEM_MC_PREPARE_SSE_USAGE();
11599 IEM_MC_REF_MXCSR(pfMxcsr);
11600 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11601 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11602 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11603 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11604 } IEM_MC_ELSE() {
11605 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11606 } IEM_MC_ENDIF();
11607
11608 IEM_MC_ADVANCE_RIP_AND_FINISH();
11609 IEM_MC_END();
11610 }
11611}
11612
11613
11614/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11615FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11616{
11617 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11618
11619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11620 if (IEM_IS_MODRM_REG_MODE(bRm))
11621 {
11622 /*
11623 * XMM, XMM.
11624 */
11625 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11627 IEM_MC_BEGIN(4, 2);
11628 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11629 IEM_MC_LOCAL(X86XMMREG, Dst);
11630 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11631 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11632 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11633 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11634 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11635 IEM_MC_PREPARE_SSE_USAGE();
11636 IEM_MC_REF_MXCSR(pfMxcsr);
11637 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11638 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11639 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11640 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11641 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11642 } IEM_MC_ELSE() {
11643 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11644 } IEM_MC_ENDIF();
11645
11646 IEM_MC_ADVANCE_RIP_AND_FINISH();
11647 IEM_MC_END();
11648 }
11649 else
11650 {
11651 /*
11652 * XMM, [mem128].
11653 */
11654 IEM_MC_BEGIN(4, 3);
11655 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11656 IEM_MC_LOCAL(X86XMMREG, Dst);
11657 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11658 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11659 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11661
11662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11663 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11664 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11666 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11667 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11668
11669 IEM_MC_PREPARE_SSE_USAGE();
11670 IEM_MC_REF_MXCSR(pfMxcsr);
11671 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11672 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11673 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11675 } IEM_MC_ELSE() {
11676 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11677 } IEM_MC_ENDIF();
11678
11679 IEM_MC_ADVANCE_RIP_AND_FINISH();
11680 IEM_MC_END();
11681 }
11682}
11683
11684
11685/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11686FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11687{
11688 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11689
11690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11691 if (IEM_IS_MODRM_REG_MODE(bRm))
11692 {
11693 /*
11694 * XMM32, XMM32.
11695 */
11696 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11698 IEM_MC_BEGIN(4, 2);
11699 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11700 IEM_MC_LOCAL(X86XMMREG, Dst);
11701 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11702 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11703 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11704 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11705 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11706 IEM_MC_PREPARE_SSE_USAGE();
11707 IEM_MC_REF_MXCSR(pfMxcsr);
11708 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11709 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11710 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11711 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11712 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11713 } IEM_MC_ELSE() {
11714 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11715 } IEM_MC_ENDIF();
11716
11717 IEM_MC_ADVANCE_RIP_AND_FINISH();
11718 IEM_MC_END();
11719 }
11720 else
11721 {
11722 /*
11723 * XMM32, [mem32].
11724 */
11725 IEM_MC_BEGIN(4, 3);
11726 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11727 IEM_MC_LOCAL(X86XMMREG, Dst);
11728 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11729 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11730 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11732
11733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11734 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11735 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11737 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11738 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11739
11740 IEM_MC_PREPARE_SSE_USAGE();
11741 IEM_MC_REF_MXCSR(pfMxcsr);
11742 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11743 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11744 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11745 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11746 } IEM_MC_ELSE() {
11747 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11748 } IEM_MC_ENDIF();
11749
11750 IEM_MC_ADVANCE_RIP_AND_FINISH();
11751 IEM_MC_END();
11752 }
11753}
11754
11755
11756/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11757FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11758{
11759 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11760
11761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11762 if (IEM_IS_MODRM_REG_MODE(bRm))
11763 {
11764 /*
11765 * XMM64, XMM64.
11766 */
11767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11769 IEM_MC_BEGIN(4, 2);
11770 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11771 IEM_MC_LOCAL(X86XMMREG, Dst);
11772 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11773 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11774 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11775 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11776 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11777 IEM_MC_PREPARE_SSE_USAGE();
11778 IEM_MC_REF_MXCSR(pfMxcsr);
11779 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11780 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11781 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11782 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11783 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11784 } IEM_MC_ELSE() {
11785 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11786 } IEM_MC_ENDIF();
11787
11788 IEM_MC_ADVANCE_RIP_AND_FINISH();
11789 IEM_MC_END();
11790 }
11791 else
11792 {
11793 /*
11794 * XMM64, [mem64].
11795 */
11796 IEM_MC_BEGIN(4, 3);
11797 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11798 IEM_MC_LOCAL(X86XMMREG, Dst);
11799 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11800 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11801 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11803
11804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11805 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11806 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11808 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11809 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11810
11811 IEM_MC_PREPARE_SSE_USAGE();
11812 IEM_MC_REF_MXCSR(pfMxcsr);
11813 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11814 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11815 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11816 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11817 } IEM_MC_ELSE() {
11818 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11819 } IEM_MC_ENDIF();
11820
11821 IEM_MC_ADVANCE_RIP_AND_FINISH();
11822 IEM_MC_END();
11823 }
11824}
11825
11826
11827/** Opcode 0x0f 0xc3. */
11828FNIEMOP_DEF(iemOp_movnti_My_Gy)
11829{
11830 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11831
11832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11833
11834 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
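    /* Note: the non-temporal store hint is not modelled; the code below
       performs an ordinary store. */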
11835 if (IEM_IS_MODRM_MEM_MODE(bRm))
11836 {
11837 switch (pVCpu->iem.s.enmEffOpSize)
11838 {
11839 case IEMMODE_32BIT:
11840 IEM_MC_BEGIN(0, 2);
11841 IEM_MC_LOCAL(uint32_t, u32Value);
11842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11843
11844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11846 IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);
11847
11848 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11849 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11850 IEM_MC_ADVANCE_RIP_AND_FINISH();
11851 IEM_MC_END();
11852 break;
11853
11854 case IEMMODE_64BIT:
11855 IEM_MC_BEGIN(0, 2);
11856 IEM_MC_LOCAL(uint64_t, u64Value);
11857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11858
11859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11861 IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);
11862
11863 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11864 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11865 IEM_MC_ADVANCE_RIP_AND_FINISH();
11866 IEM_MC_END();
11867 break;
11868
11869 case IEMMODE_16BIT:
11870 /** @todo check this form. */
11871 IEMOP_RAISE_INVALID_OPCODE_RET();
11872
11873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11874 }
11875 }
11876 else
11877 IEMOP_RAISE_INVALID_OPCODE_RET();
11878}
11879
11880
11881/* Opcode 0x66 0x0f 0xc3 - invalid */
11882/* Opcode 0xf3 0x0f 0xc3 - invalid */
11883/* Opcode 0xf2 0x0f 0xc3 - invalid */
11884
11885
11886/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11887FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11888{
11889 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
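    /* imm8 selects the destination word lane (0..3 for this 64-bit MMX form);
       any masking of stray high immediate bits is presumably left to the
       aimpl worker. */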
11890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11891 if (IEM_IS_MODRM_REG_MODE(bRm))
11892 {
11893 /*
11894 * Register, register.
11895 */
11896 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11898 IEM_MC_BEGIN(3, 0);
11899 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11900 IEM_MC_ARG(uint16_t, u16Src, 1);
11901 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11902 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11903 IEM_MC_PREPARE_FPU_USAGE();
11904 IEM_MC_FPU_TO_MMX_MODE();
11905 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11906 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11907 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11908 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11909 IEM_MC_ADVANCE_RIP_AND_FINISH();
11910 IEM_MC_END();
11911 }
11912 else
11913 {
11914 /*
11915 * Register, memory.
11916 */
11917 IEM_MC_BEGIN(3, 1);
11918 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11919 IEM_MC_ARG(uint16_t, u16Src, 1);
11920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11921
11922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11923 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11924 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11926 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11927 IEM_MC_PREPARE_FPU_USAGE();
11928 IEM_MC_FPU_TO_MMX_MODE();
11929
11930 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11931 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11932 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11933 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11934 IEM_MC_ADVANCE_RIP_AND_FINISH();
11935 IEM_MC_END();
11936 }
11937}
11938
11939
11940/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11941FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11942{
11943 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11945 if (IEM_IS_MODRM_REG_MODE(bRm))
11946 {
11947 /*
11948 * Register, register.
11949 */
11950 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11952 IEM_MC_BEGIN(3, 0);
11953 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11954 IEM_MC_ARG(uint16_t, u16Src, 1);
11955 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11957 IEM_MC_PREPARE_SSE_USAGE();
11958 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11959 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11960 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11961 IEM_MC_ADVANCE_RIP_AND_FINISH();
11962 IEM_MC_END();
11963 }
11964 else
11965 {
11966 /*
11967 * Register, memory.
11968 */
11969 IEM_MC_BEGIN(3, 2);
11970 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11971 IEM_MC_ARG(uint16_t, u16Src, 1);
11972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11973
11974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11975 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11976 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11978 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11979 IEM_MC_PREPARE_SSE_USAGE();
11980
11981 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11982 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11983 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11984 IEM_MC_ADVANCE_RIP_AND_FINISH();
11985 IEM_MC_END();
11986 }
11987}
11988
11989
11990/* Opcode 0xf3 0x0f 0xc4 - invalid */
11991/* Opcode 0xf2 0x0f 0xc4 - invalid */
11992
11993
11994/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11995FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11996{
11997 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
11998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11999 if (IEM_IS_MODRM_REG_MODE(bRm))
12000 {
12001 /*
12002 * Greg32, MMX, imm8.
12003 */
12004 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12006 IEM_MC_BEGIN(3, 1);
12007 IEM_MC_LOCAL(uint16_t, u16Dst);
12008 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12009 IEM_MC_ARG(uint64_t, u64Src, 1);
12010 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12011 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12012 IEM_MC_PREPARE_FPU_USAGE();
12013 IEM_MC_FPU_TO_MMX_MODE();
12014 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12015 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
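        /* PEXTRW zero-extends the selected word into the 32-bit destination;
           the U32 store below also clears the high dword in 64-bit mode, as
           with any 32-bit GPR write. */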
12016 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12017 IEM_MC_ADVANCE_RIP_AND_FINISH();
12018 IEM_MC_END();
12019 }
12020 /* No memory operand. */
12021 else
12022 IEMOP_RAISE_INVALID_OPCODE_RET();
12023}
12024
12025
12026/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12027FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12028{
12029 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12031 if (IEM_IS_MODRM_REG_MODE(bRm))
12032 {
12033 /*
12034 * Greg32, XMM, imm8.
12035 */
12036 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12038 IEM_MC_BEGIN(3, 1);
12039 IEM_MC_LOCAL(uint16_t, u16Dst);
12040 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12041 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12042 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12043 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12044 IEM_MC_PREPARE_SSE_USAGE();
12045 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12046 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12047 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12048 IEM_MC_ADVANCE_RIP_AND_FINISH();
12049 IEM_MC_END();
12050 }
12051 /* No memory operand. */
12052 else
12053 IEMOP_RAISE_INVALID_OPCODE_RET();
12054}
12055
12056
12057/* Opcode 0xf3 0x0f 0xc5 - invalid */
12058/* Opcode 0xf2 0x0f 0xc5 - invalid */
12059
12060
12061/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12062FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12063{
12064 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
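    /* imm8 holds four 2-bit selectors: bits [1:0] and [3:2] pick the two low
       result floats from the destination, bits [5:4] and [7:6] the two high
       ones from the source. */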
12065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12066 if (IEM_IS_MODRM_REG_MODE(bRm))
12067 {
12068 /*
12069 * XMM, XMM, imm8.
12070 */
12071 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12073 IEM_MC_BEGIN(3, 0);
12074 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12075 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12076 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12077 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12078 IEM_MC_PREPARE_SSE_USAGE();
12079 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12080 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12081 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12082 IEM_MC_ADVANCE_RIP_AND_FINISH();
12083 IEM_MC_END();
12084 }
12085 else
12086 {
12087 /*
12088 * XMM, [mem128], imm8.
12089 */
12090 IEM_MC_BEGIN(3, 2);
12091 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12092 IEM_MC_LOCAL(RTUINT128U, uSrc);
12093 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12095
12096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12097 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12098 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12100 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12101 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12102
12103 IEM_MC_PREPARE_SSE_USAGE();
12104 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12105 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12106
12107 IEM_MC_ADVANCE_RIP_AND_FINISH();
12108 IEM_MC_END();
12109 }
12110}
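
/*
 * For reference, each 2-bit field of the SHUFPS immediate selects a dword:
 * the two low fields pick from the original destination, the two high fields
 * from the source. A minimal illustrative C sketch - not the actual
 * iemAImpl_shufps_u128 implementation:
 *
 *      static void ShufPsU128(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm) // illustrative only
 *      {
 *          uint32_t const au32Old[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
 *          au32Dst[0] = au32Old[ bImm       & 3];
 *          au32Dst[1] = au32Old[(bImm >> 2) & 3];
 *          au32Dst[2] = au32Src[(bImm >> 4) & 3];
 *          au32Dst[3] = au32Src[(bImm >> 6) & 3];
 *      }
 */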
12111
12112
12113/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12114FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12115{
12116 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12118 if (IEM_IS_MODRM_REG_MODE(bRm))
12119 {
12120 /*
12121 * XMM, XMM, imm8.
12122 */
12123 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12125 IEM_MC_BEGIN(3, 0);
12126 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12127 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12128 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12129 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12130 IEM_MC_PREPARE_SSE_USAGE();
12131 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12132 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12134 IEM_MC_ADVANCE_RIP_AND_FINISH();
12135 IEM_MC_END();
12136 }
12137 else
12138 {
12139 /*
12140 * XMM, [mem128], imm8.
12141 */
12142 IEM_MC_BEGIN(3, 2);
12143 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12144 IEM_MC_LOCAL(RTUINT128U, uSrc);
12145 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12147
12148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12149 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12150 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12152 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12153 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12154
12155 IEM_MC_PREPARE_SSE_USAGE();
12156 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12158
12159 IEM_MC_ADVANCE_RIP_AND_FINISH();
12160 IEM_MC_END();
12161 }
12162}
12163
12164
12165/* Opcode 0xf3 0x0f 0xc6 - invalid */
12166/* Opcode 0xf2 0x0f 0xc6 - invalid */
12167
12168
12169/** Opcode 0x0f 0xc7 !11/1. */
12170FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12171{
12172 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12173
12174 IEM_MC_BEGIN(4, 3);
12175 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12176 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12177 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12178 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12179 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12180 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12182
12183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12184 IEMOP_HLP_DONE_DECODING();
12185 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12186
12187 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12188 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12189 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12190
12191 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12192 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12193 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12194
12195 IEM_MC_FETCH_EFLAGS(EFlags);
12196 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12197 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12198 else
12199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12200
12201 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12202 IEM_MC_COMMIT_EFLAGS(EFlags);
12203 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12204 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12205 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12206 } IEM_MC_ENDIF();
12207 IEM_MC_ADVANCE_RIP_AND_FINISH();
12208
12209 IEM_MC_END();
12210}
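
/*
 * For reference, CMPXCHG8B compares EDX:EAX against the 64-bit memory
 * operand: on match it writes ECX:EBX to memory and sets ZF, otherwise it
 * clears ZF and loads the memory value into EDX:EAX - which is why the
 * register store above only happens when ZF ends up clear. A minimal,
 * non-atomic illustrative C sketch - not the actual iemAImpl_cmpxchg8b
 * implementation:
 *
 *      static bool CmpXchg8b(uint64_t *pu64Mem, uint32_t *pu32Eax, uint32_t *pu32Edx,
 *                            uint32_t u32Ebx, uint32_t u32Ecx) // illustrative only
 *      {
 *          if (*pu64Mem == (((uint64_t)*pu32Edx << 32) | *pu32Eax))
 *          {
 *              *pu64Mem = ((uint64_t)u32Ecx << 32) | u32Ebx;
 *              return true;                        // ZF = 1
 *          }
 *          *pu32Eax = (uint32_t)*pu64Mem;          // ZF = 0
 *          *pu32Edx = (uint32_t)(*pu64Mem >> 32);
 *          return false;
 *      }
 */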
12211
12212
12213/** Opcode REX.W 0x0f 0xc7 !11/1. */
12214FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12215{
12216 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12217 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12218 {
12219 IEM_MC_BEGIN(4, 3);
12220 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12221 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12222 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12224 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12225 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12227
12228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12229 IEMOP_HLP_DONE_DECODING();
12230 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12231 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12232
12233 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12234 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12235 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12236
12237 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12238 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12239 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12240
12241 IEM_MC_FETCH_EFLAGS(EFlags);
12242
12243#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstructionsPython.py cannot parse if/else/#if spaghetti. */
12244 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12245 {
12246 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12248 else
12249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12250 }
12251 else
12252 { /* (see comments in #else case below) */
12253 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12254 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12255 else
12256 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12257 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12258 }
12259
12260#elif defined(RT_ARCH_ARM64)
12261 /** @todo may require fallback for unaligned accesses... */
12262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12264 else
12265 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12266
12267#else
12268 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
12269 accesses that are not at all atomic, which works fine in a uni-CPU guest
12270 configuration (ignoring DMA). If guest SMP is active we have no choice
12271 but to use a rendezvous callback here. Sigh. */
12272 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12273 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12274 else
12275 {
12276 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12277 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12278 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12279 }
12280#endif
12281
12282 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12283 IEM_MC_COMMIT_EFLAGS(EFlags);
12284 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12285 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12286 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12287 } IEM_MC_ENDIF();
12288 IEM_MC_ADVANCE_RIP_AND_FINISH();
12289
12290 IEM_MC_END();
12291 }
12292 Log(("cmpxchg16b -> #UD\n"));
12293 IEMOP_RAISE_INVALID_OPCODE_RET();
12294}
12295
12296FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12297{
12298 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12299 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12300 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12301}
12302
12303
12304/** Opcode 0x0f 0xc7 11/6. */
12305FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12306{
12307 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12308 IEMOP_RAISE_INVALID_OPCODE_RET();
12309
12310 if (IEM_IS_MODRM_REG_MODE(bRm))
12311 {
12312 /* register destination. */
12313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12314 switch (pVCpu->iem.s.enmEffOpSize)
12315 {
12316 case IEMMODE_16BIT:
12317 IEM_MC_BEGIN(2, 0);
12318 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12319 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12320
12321 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12322 IEM_MC_REF_EFLAGS(pEFlags);
12323 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12324 pu16Dst, pEFlags);
12325
12326 IEM_MC_ADVANCE_RIP_AND_FINISH();
12327 IEM_MC_END();
12328 break;
12329
12330 case IEMMODE_32BIT:
12331 IEM_MC_BEGIN(2, 0);
12332 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12333 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12334
12335 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12336 IEM_MC_REF_EFLAGS(pEFlags);
12337 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12338 pu32Dst, pEFlags);
12339
12340 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12341 IEM_MC_ADVANCE_RIP_AND_FINISH();
12342 IEM_MC_END();
12343 break;
12344
12345 case IEMMODE_64BIT:
12346 IEM_MC_BEGIN(2, 0);
12347 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12348 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12349
12350 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12351 IEM_MC_REF_EFLAGS(pEFlags);
12352 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12353 pu64Dst, pEFlags);
12354
12355 IEM_MC_ADVANCE_RIP_AND_FINISH();
12356 IEM_MC_END();
12357 break;
12358
12359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12360 }
12361 }
12362 /* Register only. */
12363 else
12364 IEMOP_RAISE_INVALID_OPCODE_RET();
12365}
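
/*
 * For reference, RDRAND signals success via CF: CF=1 means the destination
 * holds a valid random value, CF=0 means underflow and the destination is
 * zeroed; OF, SF, ZF, AF and PF are cleared either way. A minimal
 * illustrative C sketch of the flag handling - not the actual
 * iemAImpl_rdrand_* implementation, and GetHwRandomU32() is a hypothetical
 * placeholder for the entropy source:
 *
 *      static void RdRandU32(uint32_t *pu32Dst, uint32_t *pfEFlags) // illustrative only
 *      {
 *          uint32_t uValue = 0;
 *          bool const fOk = GetHwRandomU32(&uValue);   // hypothetical helper
 *          *pu32Dst  = fOk ? uValue : 0;
 *          *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF | X86_EFL_SF
 *                                   | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
 *          if (fOk)
 *              *pfEFlags |= X86_EFL_CF;
 *      }
 */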
12366
12367/** Opcode 0x0f 0xc7 !11/6. */
12368#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12369FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12370{
12371 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12372 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12373 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12374 IEM_MC_BEGIN(2, 0);
12375 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12376 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12378 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12379 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12380 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12381 IEM_MC_END();
12382}
12383#else
12384FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12385#endif
12386
12387/** Opcode 0x66 0x0f 0xc7 !11/6. */
12388#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12389FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12390{
12391 IEMOP_MNEMONIC(vmclear, "vmclear");
12392 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12393 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12394 IEM_MC_BEGIN(2, 0);
12395 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12396 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12398 IEMOP_HLP_DONE_DECODING();
12399 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12400 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12401 IEM_MC_END();
12402}
12403#else
12404FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12405#endif
12406
12407/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12408#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12409FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12410{
12411 IEMOP_MNEMONIC(vmxon, "vmxon");
12412 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12413 IEM_MC_BEGIN(2, 0);
12414 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12415 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12417 IEMOP_HLP_DONE_DECODING();
12418 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12419 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12420 IEM_MC_END();
12421}
12422#else
12423FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12424#endif
12425
12426/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12427#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12428FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12429{
12430 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12431 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12432 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12433 IEM_MC_BEGIN(2, 0);
12434 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12435 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12437 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12438 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12439 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12440 IEM_MC_END();
12441}
12442#else
12443FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12444#endif
12445
12446/** Opcode 0x0f 0xc7 11/7. */
12447FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12448{
12449 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12450 IEMOP_RAISE_INVALID_OPCODE_RET();
12451
12452 if (IEM_IS_MODRM_REG_MODE(bRm))
12453 {
12454 /* register destination. */
12455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12456 switch (pVCpu->iem.s.enmEffOpSize)
12457 {
12458 case IEMMODE_16BIT:
12459 IEM_MC_BEGIN(2, 0);
12460 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12461 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12462
12463 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12464 IEM_MC_REF_EFLAGS(pEFlags);
12465 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12466 pu16Dst, pEFlags);
12467
12468 IEM_MC_ADVANCE_RIP_AND_FINISH();
12469 IEM_MC_END();
12470 break;
12471
12472 case IEMMODE_32BIT:
12473 IEM_MC_BEGIN(2, 0);
12474 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12475 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12476
12477 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12478 IEM_MC_REF_EFLAGS(pEFlags);
12479 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12480 pu32Dst, pEFlags);
12481
12482 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12483 IEM_MC_ADVANCE_RIP_AND_FINISH();
12484 IEM_MC_END();
12485 break;
12486
12487 case IEMMODE_64BIT:
12488 IEM_MC_BEGIN(2, 0);
12489 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12490 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12491
12492 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12493 IEM_MC_REF_EFLAGS(pEFlags);
12494 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12495 pu64Dst, pEFlags);
12496
12497 IEM_MC_ADVANCE_RIP_AND_FINISH();
12498 IEM_MC_END();
12499 break;
12500
12501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12502 }
12503 }
12504 /* Register only. */
12505 else
12506 IEMOP_RAISE_INVALID_OPCODE_RET();
12507}
12508
12509/**
12510 * Group 9 jump table for register variant.
12511 */
12512IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12513{ /* pfx: none, 066h, 0f3h, 0f2h */
12514 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12515 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12516 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12517 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12518 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12519 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12520 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12521 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12522};
12523AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12524
12525
12526/**
12527 * Group 9 jump table for memory variant.
12528 */
12529IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12530{ /* pfx: none, 066h, 0f3h, 0f2h */
12531 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12532 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12533 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12534 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12535 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12536 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12537 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12538 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12539};
12540AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12541
12542
12543/** Opcode 0x0f 0xc7. */
12544FNIEMOP_DEF(iemOp_Grp9)
12545{
12546 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12547 if (IEM_IS_MODRM_REG_MODE(bRm))
12548 /* register, register */
12549 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12550 + pVCpu->iem.s.idxPrefix], bRm);
12551 /* memory, register */
12552 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12553 + pVCpu->iem.s.idxPrefix], bRm);
12554}
12555
12556
12557/**
12558 * Common 'bswap register' helper.
12559 */
12560FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12561{
12562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12563 switch (pVCpu->iem.s.enmEffOpSize)
12564 {
12565 case IEMMODE_16BIT:
12566 IEM_MC_BEGIN(1, 0);
12567 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12568 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12569 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12570 IEM_MC_ADVANCE_RIP_AND_FINISH();
12571 IEM_MC_END();
12572 break;
12573
12574 case IEMMODE_32BIT:
12575 IEM_MC_BEGIN(1, 0);
12576 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12577 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12578 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12579 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12580 IEM_MC_ADVANCE_RIP_AND_FINISH();
12581 IEM_MC_END();
12582 break;
12583
12584 case IEMMODE_64BIT:
12585 IEM_MC_BEGIN(1, 0);
12586 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12587 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12588 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12589 IEM_MC_ADVANCE_RIP_AND_FINISH();
12590 IEM_MC_END();
12591 break;
12592
12593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12594 }
12595}
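
/*
 * For reference, BSWAP reverses the byte order of a 32-bit or 64-bit
 * register; with a 16-bit operand size the result is undefined by the
 * manuals, hence the dedicated iemAImpl_bswap_u16 worker above. A minimal
 * illustrative C sketch of the 32-bit case - not the actual
 * iemAImpl_bswap_u32 implementation:
 *
 *      static uint32_t BSwapU32(uint32_t u32) // illustrative only
 *      {
 *          return  (u32                            << 24)
 *               | ((u32 & UINT32_C(0x0000ff00))    <<  8)
 *               | ((u32 & UINT32_C(0x00ff0000))    >>  8)
 *               |  (u32                            >> 24);
 *      }
 */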
12596
12597
12598/** Opcode 0x0f 0xc8. */
12599FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12600{
12601 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12602 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12603 prefix, but REX.B appears to be the correct prefix. For a parallel
12604 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12605 IEMOP_HLP_MIN_486();
12606 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12607}
12608
12609
12610/** Opcode 0x0f 0xc9. */
12611FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12612{
12613 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12614 IEMOP_HLP_MIN_486();
12615 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12616}
12617
12618
12619/** Opcode 0x0f 0xca. */
12620FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12621{
12622 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12623 IEMOP_HLP_MIN_486();
12624 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12625}
12626
12627
12628/** Opcode 0x0f 0xcb. */
12629FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12630{
12631 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12632 IEMOP_HLP_MIN_486();
12633 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12634}
12635
12636
12637/** Opcode 0x0f 0xcc. */
12638FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12639{
12640 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12641 IEMOP_HLP_MIN_486();
12642 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12643}
12644
12645
12646/** Opcode 0x0f 0xcd. */
12647FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12648{
12649 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12650 IEMOP_HLP_MIN_486();
12651 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12652}
12653
12654
12655/** Opcode 0x0f 0xce. */
12656FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12657{
12658 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12659 IEMOP_HLP_MIN_486();
12660 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12661}
12662
12663
12664/** Opcode 0x0f 0xcf. */
12665FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12666{
12667 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12668 IEMOP_HLP_MIN_486();
12669 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12670}
12671
12672
12673/* Opcode 0x0f 0xd0 - invalid */
12674
12675
12676/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12677FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12678{
12679 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12680 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12681}
12682
12683
12684/* Opcode 0xf3 0x0f 0xd0 - invalid */
12685
12686
12687/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12688FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12689{
12690 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12691 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12692}
12693
12694
12695
12696/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12697FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12698{
12699 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12700 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12701}
12702
12703/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12704FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12705{
12706 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12707 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12708}
12709
12710/* Opcode 0xf3 0x0f 0xd1 - invalid */
12711/* Opcode 0xf2 0x0f 0xd1 - invalid */
12712
12713/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12714FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12715{
12716 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12717 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12718}
12719
12720
12721/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12722FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12723{
12724 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12725 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12726}
12727
12728
12729/* Opcode 0xf3 0x0f 0xd2 - invalid */
12730/* Opcode 0xf2 0x0f 0xd2 - invalid */
12731
12732/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12733FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12734{
12735 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12736 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12737}
12738
12739
12740/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12741FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12742{
12743 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12744 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12745}
12746
12747
12748/* Opcode 0xf3 0x0f 0xd3 - invalid */
12749/* Opcode 0xf2 0x0f 0xd3 - invalid */
12750
12751
12752/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12753FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12754{
12755 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12756 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12757}
12758
12759
12760/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12761FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12762{
12763 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12764 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12765}
12766
12767
12768/* Opcode 0xf3 0x0f 0xd4 - invalid */
12769/* Opcode 0xf2 0x0f 0xd4 - invalid */
12770
12771/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12772FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12773{
12774 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12775 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12776}
12777
12778/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12779FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12780{
12781 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12782 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12783}
12784
12785
12786/* Opcode 0xf3 0x0f 0xd5 - invalid */
12787/* Opcode 0xf2 0x0f 0xd5 - invalid */
12788
12789/* Opcode 0x0f 0xd6 - invalid */
12790
12791/**
12792 * @opcode 0xd6
12793 * @oppfx 0x66
12794 * @opcpuid sse2
12795 * @opgroup og_sse2_pcksclr_datamove
12796 * @opxcpttype none
12797 * @optest op1=-1 op2=2 -> op1=2
12798 * @optest op1=0 op2=-42 -> op1=-42
12799 */
12800FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12801{
12802 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12804 if (IEM_IS_MODRM_REG_MODE(bRm))
12805 {
12806 /*
12807 * Register, register.
12808 */
12809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12810 IEM_MC_BEGIN(0, 2);
12811 IEM_MC_LOCAL(uint64_t, uSrc);
12812
12813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12814 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12815
12816 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12817 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12818
12819 IEM_MC_ADVANCE_RIP_AND_FINISH();
12820 IEM_MC_END();
12821 }
12822 else
12823 {
12824 /*
12825 * Memory, register.
12826 */
12827 IEM_MC_BEGIN(0, 2);
12828 IEM_MC_LOCAL(uint64_t, uSrc);
12829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12830
12831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12835
12836 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12837 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12838
12839 IEM_MC_ADVANCE_RIP_AND_FINISH();
12840 IEM_MC_END();
12841 }
12842}
12843
12844
12845/**
12846 * @opcode 0xd6
12847 * @opcodesub 11 mr/reg
12848 * @oppfx f3
12849 * @opcpuid sse2
12850 * @opgroup og_sse2_simdint_datamove
12851 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12852 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12853 */
12854FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12855{
12856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12857 if (IEM_IS_MODRM_REG_MODE(bRm))
12858 {
12859 /*
12860 * Register, register.
12861 */
12862 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12864 IEM_MC_BEGIN(0, 1);
12865 IEM_MC_LOCAL(uint64_t, uSrc);
12866
12867 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12869 IEM_MC_FPU_TO_MMX_MODE();
12870
12871 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12872 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12873
12874 IEM_MC_ADVANCE_RIP_AND_FINISH();
12875 IEM_MC_END();
12876 }
12877
12878 /**
12879 * @opdone
12880 * @opmnemonic udf30fd6mem
12881 * @opcode 0xd6
12882 * @opcodesub !11 mr/reg
12883 * @oppfx f3
12884 * @opunused intel-modrm
12885 * @opcpuid sse
12886 * @optest ->
12887 */
12888 else
12889 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12890}
12891
12892
12893/**
12894 * @opcode 0xd6
12895 * @opcodesub 11 mr/reg
12896 * @oppfx f2
12897 * @opcpuid sse2
12898 * @opgroup og_sse2_simdint_datamove
12899 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12900 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12901 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12902 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12903 * @optest op1=-42 op2=0xfedcba9876543210
12904 * -> op1=0xfedcba9876543210 ftw=0xff
12905 */
12906FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12907{
12908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12909 if (IEM_IS_MODRM_REG_MODE(bRm))
12910 {
12911 /*
12912 * Register, register.
12913 */
12914 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12916 IEM_MC_BEGIN(0, 1);
12917 IEM_MC_LOCAL(uint64_t, uSrc);
12918
12919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12920 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12921 IEM_MC_FPU_TO_MMX_MODE();
12922
12923 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12924 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12925
12926 IEM_MC_ADVANCE_RIP_AND_FINISH();
12927 IEM_MC_END();
12928 }
12929
12930 /**
12931 * @opdone
12932 * @opmnemonic udf20fd6mem
12933 * @opcode 0xd6
12934 * @opcodesub !11 mr/reg
12935 * @oppfx f2
12936 * @opunused intel-modrm
12937 * @opcpuid sse
12938 * @optest ->
12939 */
12940 else
12941 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12942}
12943
12944
12945/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12946FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12947{
12948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12949 /* Docs say register only. */
12950 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12951 {
12952 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12953 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
12954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12955 IEM_MC_BEGIN(2, 0);
12956 IEM_MC_ARG(uint64_t *, puDst, 0);
12957 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12958 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12959 IEM_MC_PREPARE_FPU_USAGE();
12960 IEM_MC_FPU_TO_MMX_MODE();
12961
12962 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12963 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12964 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12965
12966 IEM_MC_ADVANCE_RIP_AND_FINISH();
12967 IEM_MC_END();
12968 }
12969 else
12970 IEMOP_RAISE_INVALID_OPCODE_RET();
12971}
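
/*
 * For reference, PMOVMSKB gathers the most significant bit of each packed
 * byte into the low bits of the destination GPR. A minimal illustrative C
 * sketch of the 64-bit (MMX) form - not the actual iemAImpl_pmovmskb_u64
 * implementation; the 128-bit form below produces 16 mask bits the same way:
 *
 *      static uint64_t PMovMskBU64(uint64_t uSrc) // illustrative only
 *      {
 *          uint64_t fMask = 0;
 *          for (unsigned iByte = 0; iByte < 8; iByte++)
 *              fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
 *          return fMask;
 *      }
 */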
12972
12973
12974 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12975FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12976{
12977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12978 /* Docs say register only. */
12979 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12980 {
12981 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12982 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
12983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12984 IEM_MC_BEGIN(2, 0);
12985 IEM_MC_ARG(uint64_t *, puDst, 0);
12986 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12987 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12988 IEM_MC_PREPARE_SSE_USAGE();
12989 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12990 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12991 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12992 IEM_MC_ADVANCE_RIP_AND_FINISH();
12993 IEM_MC_END();
12994 }
12995 else
12996 IEMOP_RAISE_INVALID_OPCODE_RET();
12997}
12998
12999
13000/* Opcode 0xf3 0x0f 0xd7 - invalid */
13001/* Opcode 0xf2 0x0f 0xd7 - invalid */
13002
13003
13004/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13005FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13006{
13007 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13008 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13009}
13010
13011
13012/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13013FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13014{
13015 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13016 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13017}
13018
13019
13020/* Opcode 0xf3 0x0f 0xd8 - invalid */
13021/* Opcode 0xf2 0x0f 0xd8 - invalid */
13022
13023/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13024FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13025{
13026 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13027 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13028}
13029
13030
13031/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13032FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13033{
13034 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13035 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13036}
13037
13038
13039/* Opcode 0xf3 0x0f 0xd9 - invalid */
13040/* Opcode 0xf2 0x0f 0xd9 - invalid */
13041
13042/** Opcode 0x0f 0xda - pminub Pq, Qq */
13043FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13044{
13045 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13046 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13047}
13048
13049
13050/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13051FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13052{
13053 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13054 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13055}
13056
13057/* Opcode 0xf3 0x0f 0xda - invalid */
13058/* Opcode 0xf2 0x0f 0xda - invalid */
13059
13060/** Opcode 0x0f 0xdb - pand Pq, Qq */
13061FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13062{
13063 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13064 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13065}
13066
13067
13068/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13069FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13070{
13071 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13072 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13073}
13074
13075
13076/* Opcode 0xf3 0x0f 0xdb - invalid */
13077/* Opcode 0xf2 0x0f 0xdb - invalid */
13078
13079/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13080FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13081{
13082 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13083 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13084}
13085
13086
13087/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13088FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13089{
13090 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13091 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13092}
13093
13094
13095/* Opcode 0xf3 0x0f 0xdc - invalid */
13096/* Opcode 0xf2 0x0f 0xdc - invalid */
13097
13098/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13099FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13100{
13101 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13102 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13103}
13104
13105
13106/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13107FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13108{
13109 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13110 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13111}
13112
13113
13114/* Opcode 0xf3 0x0f 0xdd - invalid */
13115/* Opcode 0xf2 0x0f 0xdd - invalid */
13116
13117/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13118FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13119{
13120 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13121 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13122}
13123
13124
13125 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13126FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13127{
13128 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13129 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13130}
13131
13132/* Opcode 0xf3 0x0f 0xde - invalid */
13133/* Opcode 0xf2 0x0f 0xde - invalid */
13134
13135
13136/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13137FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13138{
13139 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13140 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13141}
13142
13143
13144/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13145FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13146{
13147 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13148 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13149}
13150
13151
13152/* Opcode 0xf3 0x0f 0xdf - invalid */
13153/* Opcode 0xf2 0x0f 0xdf - invalid */
13154
13155/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13156FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13157{
13158 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13159 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13160}
13161
13162
13163/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13164FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13165{
13166 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13167 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13168}
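
/*
 * For reference, PAVGB/PAVGW compute a rounding unsigned average,
 * (a + b + 1) >> 1, per element. A minimal illustrative C sketch for a
 * single byte element - not the actual iemAImpl_pavgb_* implementation:
 *
 *      static uint8_t PAvgB(uint8_t uByte1, uint8_t uByte2) // illustrative only
 *      {
 *          return (uint8_t)(((unsigned)uByte1 + uByte2 + 1) >> 1);
 *      }
 */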
13169
13170
13171/* Opcode 0xf3 0x0f 0xe0 - invalid */
13172/* Opcode 0xf2 0x0f 0xe0 - invalid */
13173
13174/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13175FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13176{
13177 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13178 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13179}
13180
13181
13182/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13183FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13184{
13185 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13186 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13187}
13188
13189
13190/* Opcode 0xf3 0x0f 0xe1 - invalid */
13191/* Opcode 0xf2 0x0f 0xe1 - invalid */
13192
13193/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13194FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13195{
13196 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13197 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13198}
13199
13200
13201/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13202FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13203{
13204 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13205 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13206}
13207
13208
13209/* Opcode 0xf3 0x0f 0xe2 - invalid */
13210/* Opcode 0xf2 0x0f 0xe2 - invalid */
13211
13212/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13213FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13214{
13215 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13216 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13217}
13218
13219
13220/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13221FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13222{
13223 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13224 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13225}
13226
13227
13228/* Opcode 0xf3 0x0f 0xe3 - invalid */
13229/* Opcode 0xf2 0x0f 0xe3 - invalid */
13230
13231/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13232FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13233{
13234 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13235 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13236}
13237
13238
13239/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13240FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13241{
13242 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13243 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13244}
13245
13246
13247/* Opcode 0xf3 0x0f 0xe4 - invalid */
13248/* Opcode 0xf2 0x0f 0xe4 - invalid */
13249
13250/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13251FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13252{
13253 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13254 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13255}
13256
13257
13258/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13259FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13260{
13261 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13262 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13263}
13264
13265
13266/* Opcode 0xf3 0x0f 0xe5 - invalid */
13267/* Opcode 0xf2 0x0f 0xe5 - invalid */
13268/* Opcode 0x0f 0xe6 - invalid */
13269
13270
13271/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13272FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13273{
13274 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13275 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13276}
13277
13278
13279/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13280FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13281{
13282 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13283 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13284}
13285
13286
13287/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13288FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13289{
13290 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13291 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13292}
13293
13294
13295/**
13296 * @opcode 0xe7
13297 * @opcodesub !11 mr/reg
13298 * @oppfx none
13299 * @opcpuid sse
13300 * @opgroup og_sse1_cachect
13301 * @opxcpttype none
13302 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13303 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13304 */
13305FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13306{
13307 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13309 if (IEM_IS_MODRM_MEM_MODE(bRm))
13310 {
13311 /* Register, memory. */
13312 IEM_MC_BEGIN(0, 2);
13313 IEM_MC_LOCAL(uint64_t, uSrc);
13314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13315
13316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13318 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13319 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13320 IEM_MC_FPU_TO_MMX_MODE();
13321
13322 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13323 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13324
13325 IEM_MC_ADVANCE_RIP_AND_FINISH();
13326 IEM_MC_END();
13327 }
13328 /**
13329 * @opdone
13330 * @opmnemonic ud0fe7reg
13331 * @opcode 0xe7
13332 * @opcodesub 11 mr/reg
13333 * @oppfx none
13334 * @opunused immediate
13335 * @opcpuid sse
13336 * @optest ->
13337 */
13338 else
13339 IEMOP_RAISE_INVALID_OPCODE_RET();
13340}
13341
13342/**
13343 * @opcode 0xe7
13344 * @opcodesub !11 mr/reg
13345 * @oppfx 0x66
13346 * @opcpuid sse2
13347 * @opgroup og_sse2_cachect
13348 * @opxcpttype 1
13349 * @optest op1=-1 op2=2 -> op1=2
13350 * @optest op1=0 op2=-42 -> op1=-42
13351 */
13352FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13353{
13354 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13356 if (IEM_IS_MODRM_MEM_MODE(bRm))
13357 {
13358 /* Register, memory. */
13359 IEM_MC_BEGIN(0, 2);
13360 IEM_MC_LOCAL(RTUINT128U, uSrc);
13361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13362
13363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13367
13368 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13369 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13370
13371 IEM_MC_ADVANCE_RIP_AND_FINISH();
13372 IEM_MC_END();
13373 }
13374
13375 /**
13376 * @opdone
13377 * @opmnemonic ud660fe7reg
13378 * @opcode 0xe7
13379 * @opcodesub 11 mr/reg
13380 * @oppfx 0x66
13381 * @opunused immediate
13382 * @opcpuid sse
13383 * @optest ->
13384 */
13385 else
13386 IEMOP_RAISE_INVALID_OPCODE_RET();
13387}
13388
13389/* Opcode 0xf3 0x0f 0xe7 - invalid */
13390/* Opcode 0xf2 0x0f 0xe7 - invalid */
13391
13392
13393/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13394FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13395{
13396 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13397 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13398}
13399
13400
13401/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13402FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13403{
13404 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13405 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13406}
13407
13408
13409/* Opcode 0xf3 0x0f 0xe8 - invalid */
13410/* Opcode 0xf2 0x0f 0xe8 - invalid */
13411
13412/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13413FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13414{
13415 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13416 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13417}
13418
13419
13420/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13421FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13422{
13423 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13424 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13425}
13426
13427
13428/* Opcode 0xf3 0x0f 0xe9 - invalid */
13429/* Opcode 0xf2 0x0f 0xe9 - invalid */
13430
13431
13432/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13433FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13434{
13435 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13436 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13437}
13438
13439
13440/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13441FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13442{
13443 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13444 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13445}
13446
13447
13448/* Opcode 0xf3 0x0f 0xea - invalid */
13449/* Opcode 0xf2 0x0f 0xea - invalid */
13450
13451
13452/** Opcode 0x0f 0xeb - por Pq, Qq */
13453FNIEMOP_DEF(iemOp_por_Pq_Qq)
13454{
13455 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13456 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13457}
13458
13459
13460/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13461FNIEMOP_DEF(iemOp_por_Vx_Wx)
13462{
13463 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13464 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13465}
13466
13467
13468/* Opcode 0xf3 0x0f 0xeb - invalid */
13469/* Opcode 0xf2 0x0f 0xeb - invalid */
13470
13471/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13472FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13473{
13474 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13475 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13476}
13477
13478
13479/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13480FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13481{
13482 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13483 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13484}
13485
13486
13487/* Opcode 0xf3 0x0f 0xec - invalid */
13488/* Opcode 0xf2 0x0f 0xec - invalid */
13489
13490/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13491FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13492{
13493 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13494 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13495}
13496
13497
13498/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13499FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13500{
13501 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13502 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13503}
13504
13505
13506/* Opcode 0xf3 0x0f 0xed - invalid */
13507/* Opcode 0xf2 0x0f 0xed - invalid */
13508
13509
13510/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13511FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13512{
13513 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13514 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13515}
13516
13517
13518/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13519FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13520{
13521 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13522 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13523}
13524
13525
13526/* Opcode 0xf3 0x0f 0xee - invalid */
13527/* Opcode 0xf2 0x0f 0xee - invalid */
13528
13529
13530/** Opcode 0x0f 0xef - pxor Pq, Qq */
13531FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13532{
13533 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13534 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13535}
13536
13537
13538/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13539FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13540{
13541 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13542 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13543}
13544
13545
13546/* Opcode 0xf3 0x0f 0xef - invalid */
13547/* Opcode 0xf2 0x0f 0xef - invalid */
13548
13549/* Opcode 0x0f 0xf0 - invalid */
13550/* Opcode 0x66 0x0f 0xf0 - invalid */
13551
13552
13553/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13554FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13555{
13556 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13558 if (IEM_IS_MODRM_REG_MODE(bRm))
13559 {
13560 /*
13561 * Register, register - (not implemented, assuming it raises \#UD).
13562 */
13563 IEMOP_RAISE_INVALID_OPCODE_RET();
13564 }
13565 else
13566 {
13567 /*
13568 * Register, memory.
13569 */
13570 IEM_MC_BEGIN(0, 2);
13571 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13573
13574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13576 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13578 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13579 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13580
13581 IEM_MC_ADVANCE_RIP_AND_FINISH();
13582 IEM_MC_END();
13583 }
13584}
13585
13586
13587/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13588FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13589{
13590 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13591 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13592}
13593
13594
13595/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13596FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13597{
13598 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13599 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13600}
13601
13602
13603/* Opcode 0xf2 0x0f 0xf1 - invalid */
13604
13605/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13606FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13607{
13608 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13609 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13610}
13611
13612
13613/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13614FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13615{
13616 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13617 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13618}
13619
13620
13621/* Opcode 0xf2 0x0f 0xf2 - invalid */
13622
13623/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13624FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13625{
13626 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13627 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13628}
13629
13630
13631/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13632FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13633{
13634 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13635 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13636}
13637
13638/* Opcode 0xf2 0x0f 0xf3 - invalid */
13639
13640/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13641FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13642{
13643 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13644 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13645}
13646
13647
13648 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13649FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13650{
13651 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13652 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13653}
13654
13655
13656/* Opcode 0xf2 0x0f 0xf4 - invalid */
13657
13658/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13659FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13660{
13661 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13662 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13663}
13664
13665
13666/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13667FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13668{
13669 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13670 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13671}
13672
13673/* Opcode 0xf2 0x0f 0xf5 - invalid */
13674
13675/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13676FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13677{
13678 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13679 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13680}
13681
13682
13683/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13684FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13685{
13686 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13687 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13688}
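
/*
 * For reference, PSADBW sums the absolute differences of the packed unsigned
 * bytes of the two operands and stores the sum zero-extended in the low word
 * group of the destination. A minimal illustrative C sketch of the 64-bit
 * (MMX) form - not the actual iemAImpl_psadbw_u64 implementation:
 *
 *      static uint64_t PSadBwU64(uint64_t uSrc1, uint64_t uSrc2) // illustrative only
 *      {
 *          uint32_t uSum = 0;
 *          for (unsigned iByte = 0; iByte < 8; iByte++)
 *          {
 *              uint8_t const b1 = (uint8_t)(uSrc1 >> (iByte * 8));
 *              uint8_t const b2 = (uint8_t)(uSrc2 >> (iByte * 8));
 *              uSum += b1 >= b2 ? (uint8_t)(b1 - b2) : (uint8_t)(b2 - b1);
 *          }
 *          return uSum;    // word 0 = sum, the upper words are zero
 *      }
 */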
13689
13690
13691/* Opcode 0xf2 0x0f 0xf6 - invalid */
13692
13693/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13694FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13695/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13696FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}
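
/* Note: PSUBQ is an SSE2 addition even in its MMX register form, hence the
   _Sse2 worker variant above, which requires SSE2 rather than plain MMX. */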


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
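
/* Note: As the code above reflects, Intel CPUs consume a ModR/M byte (and any
   SIB/displacement bytes) for UD0 before raising #UD, whereas other vendors
   fault on the opcode alone.  The effective address is calculated only to
   decode the instruction length correctly; no memory is accessed. */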



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
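
/* Note: Each opcode row above has four entries (no prefix, 066h, 0f3h, 0f2h),
   so the map is indexed as (opcode byte) * 4 + prefix index, giving the
   1024 entries asserted here. */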

/** @} */
