VirtualBox
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 97529

Last change on this file since 97529 was 97479, checked in by vboxsync, 2 years ago:

IEM: Fixed many instances of potentially broken EA calculation where we didn't add the size of the following immediate.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 97479 2022-11-09 11:02:55Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */
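
/*
 * All of the two-operand workers below share one decode pattern: fetch the
 * ModR/M byte, then split on its mod field.  mod == 3 selects the
 * register-to-register form; anything else is the memory form, for which the
 * effective address is calculated before decoding is completed.  In outline:
 *
 *      uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *      if (IEM_IS_MODRM_REG_MODE(bRm))
 *          // reg, reg: reference both registers directly.
 *      else
 *          // reg, [mem]: calc effective address, fetch into a local, then call.
 */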


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
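
/*
 * For reference, a typical user of the worker above is a one-line mnemonic
 * wrapper further down in this file; sketched here with a hypothetical
 * handler name and assembly helper:
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *      {
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 *      }
 */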


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
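
/*
 * The SSE worker above is wired up the same way as the MMX one; sketched here
 * with a hypothetical assembly helper name:
 *
 *      return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxxx_u128);
 */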


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
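
/*
 * Worked example of the "low half" semantics above: punpcklbw mm1, mm2/mem32
 * only needs the low dword of its source, so the memory form does a 32-bit
 * read zero-extended to 64 bits, and the helper interleaves the low bytes:
 *
 *      mm1 low bytes = a3 a2 a1 a0,  src low bytes = b3 b2 b1 b0
 *   => mm1           = b3 a3 b2 a2 b1 a1 b0 a0
 */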


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either the low 64 bits or the
 * full 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either the low 64 bits or the
 * full 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
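
/*
 * Unlike the integer workers, the FP worker above routes its output through
 * an IEMSSERESULT local: the assembly helper writes both the result value and
 * the updated MXCSR into it, IEM_MC_STORE_SSE_RESULT commits the value to
 * xmm1, and IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT then raises the
 * SIMD FP exception (or \#UD, as the macro name suggests) if an unmasked
 * exception was flagged.
 */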


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
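
/*
 * The IEM_MC_DEFER_TO_CIMPL_2 / IEM_MC_CALL_CIMPL_2 calls above illustrate
 * the pattern used for instructions too involved to emulate with inline
 * microcode: decoding stops and the whole operation is handed to a C
 * implementation worker (iemCImpl_sldt_reg / iemCImpl_sldt_mem here)
 * together with its arguments.
 */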


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5, common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};
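
/*
 * Group tables like g_apfnGroup6 are indexed by the ModR/M reg field
 * (bits 5:3), which for group opcodes encodes the instruction rather than a
 * register.  E.g. for 0f 00 d8, bRm = 0xd8 = 11 011 000b, so the reg field
 * is 3 and iemOp_Grp6_ltr is selected.
 */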

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a hypercall
       isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a hypercall
       isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, everything is 16-bit and only
       the low bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1734
1735
1736/** Opcode 0x0f 0x01 /7. */
1737FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1738{
1739 IEMOP_MNEMONIC(invlpg, "invlpg");
1740 IEMOP_HLP_MIN_486();
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1742 IEM_MC_BEGIN(1, 1);
1743 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1745 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1746 IEM_MC_END();
1747 return VINF_SUCCESS;
1748}
1749
1750
1751/** Opcode 0x0f 0x01 /7. */
1752FNIEMOP_DEF(iemOp_Grp7_swapgs)
1753{
1754 IEMOP_MNEMONIC(swapgs, "swapgs");
1755 IEMOP_HLP_ONLY_64BIT();
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1757 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1758}
1759
1760
1761/** Opcode 0x0f 0x01 0xf9. */
1762FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1763{
1764 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1766 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
1767}
1768
1769
1770/**
1771 * Group 7 jump table, memory variant.
1772 */
1773IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1774{
1775 iemOp_Grp7_sgdt,
1776 iemOp_Grp7_sidt,
1777 iemOp_Grp7_lgdt,
1778 iemOp_Grp7_lidt,
1779 iemOp_Grp7_smsw,
1780 iemOp_InvalidWithRM,
1781 iemOp_Grp7_lmsw,
1782 iemOp_Grp7_invlpg
1783};
1784
1785
1786/** Opcode 0x0f 0x01. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 return IEMOP_RAISE_INVALID_OPCODE();
1804
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 return IEMOP_RAISE_INVALID_OPCODE();
1812
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 return IEMOP_RAISE_INVALID_OPCODE();
1820
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 return IEMOP_RAISE_INVALID_OPCODE();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 return IEMOP_RAISE_INVALID_OPCODE();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
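
/*
 * Worked ModR/M example for the dispatcher above (illustrative): for
 * 0x0f 0x01 0xf9 the byte 0xf9 splits into mod=3 (11b), reg=7, rm=1,
 * so decoding lands in the reg=7/rm=1 leaf, i.e. rdtscp.  With mod != 3
 * the reg field alone selects the handler from g_apfnGroup7Mem, e.g.
 * reg=7 with a memory operand yields invlpg.
 */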
1855
1856/** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03). */
1857FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1858{
1859 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861
1862 if (IEM_IS_MODRM_REG_MODE(bRm))
1863 {
1864 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1865 switch (pVCpu->iem.s.enmEffOpSize)
1866 {
1867 case IEMMODE_16BIT:
1868 {
1869 IEM_MC_BEGIN(3, 0);
1870 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1871 IEM_MC_ARG(uint16_t, u16Sel, 1);
1872 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1873
1874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1875 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1876 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1877
1878 IEM_MC_END();
1879 return VINF_SUCCESS;
1880 }
1881
1882 case IEMMODE_32BIT:
1883 case IEMMODE_64BIT:
1884 {
1885 IEM_MC_BEGIN(3, 0);
1886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1887 IEM_MC_ARG(uint16_t, u16Sel, 1);
1888 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1889
1890 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1891 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1892 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1893
1894 IEM_MC_END();
1895 return VINF_SUCCESS;
1896 }
1897
1898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1899 }
1900 }
1901 else
1902 {
1903 switch (pVCpu->iem.s.enmEffOpSize)
1904 {
1905 case IEMMODE_16BIT:
1906 {
1907 IEM_MC_BEGIN(3, 1);
1908 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1909 IEM_MC_ARG(uint16_t, u16Sel, 1);
1910 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1912
1913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1914 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1915
1916 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1917 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1918 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1919
1920 IEM_MC_END();
1921 return VINF_SUCCESS;
1922 }
1923
1924 case IEMMODE_32BIT:
1925 case IEMMODE_64BIT:
1926 {
1927 IEM_MC_BEGIN(3, 1);
1928 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1929 IEM_MC_ARG(uint16_t, u16Sel, 1);
1930 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1932
1933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1934 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1935/** @todo testcase: make sure it's a 16-bit read. */
1936
1937 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1938 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1939 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1940
1941 IEM_MC_END();
1942 return VINF_SUCCESS;
1943 }
1944
1945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1946 }
1947 }
1948}
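
/*
 * Semantics reminder for the common worker above (illustrative): on
 * success 'lar eax, cx' loads the access rights of the selector in CX
 * and sets ZF, while 'lsl eax, cx' loads the segment limit (scaled by
 * the granularity bit); on failure ZF is cleared and the destination is
 * left untouched.  Both read only a 16-bit selector whatever the
 * operand size, hence the IEM_MC_FETCH_MEM_U16 in the memory paths.
 */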
1949
1950
1952/** Opcode 0x0f 0x02. */
1953FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1954{
1955 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1956 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1957}
1958
1959
1960/** Opcode 0x0f 0x03. */
1961FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1962{
1963 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1964 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1965}
1966
1967
1968/** Opcode 0x0f 0x05. */
1969FNIEMOP_DEF(iemOp_syscall)
1970{
1971 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1973 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1974}
1975
1976
1977/** Opcode 0x0f 0x06. */
1978FNIEMOP_DEF(iemOp_clts)
1979{
1980 IEMOP_MNEMONIC(clts, "clts");
1981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1982 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1983}
1984
1985
1986/** Opcode 0x0f 0x07. */
1987FNIEMOP_DEF(iemOp_sysret)
1988{
1989 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1991 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1992}
1993
1994
1995/** Opcode 0x0f 0x08. */
1996FNIEMOP_DEF(iemOp_invd)
1997{
1998 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1999 IEMOP_HLP_MIN_486();
2000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2001 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
2002}
2003
2004
2005/** Opcode 0x0f 0x09. */
2006FNIEMOP_DEF(iemOp_wbinvd)
2007{
2008 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2009 IEMOP_HLP_MIN_486();
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
2012}
2013
2014
2015/** Opcode 0x0f 0x0b. */
2016FNIEMOP_DEF(iemOp_ud2)
2017{
2018 IEMOP_MNEMONIC(ud2, "ud2");
2019 return IEMOP_RAISE_INVALID_OPCODE();
2020}
2021
2022/** Opcode 0x0f 0x0d. */
2023FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2024{
2025 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2026 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2027 {
2028 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2029 return IEMOP_RAISE_INVALID_OPCODE();
2030 }
2031
2032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2033 if (IEM_IS_MODRM_REG_MODE(bRm))
2034 {
2035 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2036 return IEMOP_RAISE_INVALID_OPCODE();
2037 }
2038
2039 switch (IEM_GET_MODRM_REG_8(bRm))
2040 {
2041 case 2: /* Aliased to /0 for the time being. */
2042 case 4: /* Aliased to /0 for the time being. */
2043 case 5: /* Aliased to /0 for the time being. */
2044 case 6: /* Aliased to /0 for the time being. */
2045 case 7: /* Aliased to /0 for the time being. */
2046 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2047 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2048 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2050 }
2051
2052 IEM_MC_BEGIN(0, 1);
2053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2056 /* Currently a NOP. */
2057 NOREF(GCPtrEffSrc);
2058 IEM_MC_ADVANCE_RIP_AND_FINISH();
2059 IEM_MC_END();
2060}
2061
2062
2063/** Opcode 0x0f 0x0e. */
2064FNIEMOP_DEF(iemOp_femms)
2065{
2066 IEMOP_MNEMONIC(femms, "femms");
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068
2069 IEM_MC_BEGIN(0, 0);
2070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2072 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2073 IEM_MC_FPU_FROM_MMX_MODE();
2074 IEM_MC_ADVANCE_RIP_AND_FINISH();
2075 IEM_MC_END();
2076}
2077
2078
2079/** Opcode 0x0f 0x0f. */
2080FNIEMOP_DEF(iemOp_3Dnow)
2081{
2082 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2083 {
2084 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2085 return IEMOP_RAISE_INVALID_OPCODE();
2086 }
2087
2088#ifdef IEM_WITH_3DNOW
2089 /* This is pretty sparse, use switch instead of table. */
2090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2092#else
2093 IEMOP_BITCH_ABOUT_STUB();
2094 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2095#endif
2096}
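
/*
 * Encoding note (illustrative): AMD 3DNow! instructions have the form
 * 0x0f 0x0f /r ib, the actual operation being selected by the trailing
 * immediate byte (e.g. ib=0x9e is pfadd).  The populated encodings are
 * few and scattered, hence the switch-based dispatcher rather than a
 * 256-entry function table.
 */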
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
2108FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2109{
2110 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2112 if (IEM_IS_MODRM_REG_MODE(bRm))
2113 {
2114 /*
2115 * XMM128, XMM128.
2116 */
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 IEM_MC_BEGIN(0, 0);
2119 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2121 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2122 IEM_GET_MODRM_RM(pVCpu, bRm));
2123 IEM_MC_ADVANCE_RIP_AND_FINISH();
2124 IEM_MC_END();
2125 }
2126 else
2127 {
2128 /*
2129 * XMM128, [mem128].
2130 */
2131 IEM_MC_BEGIN(0, 2);
2132 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2137 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2139
2140 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2141 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2142
2143 IEM_MC_ADVANCE_RIP_AND_FINISH();
2144 IEM_MC_END();
2145 }
2147}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
2159FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2160{
2161 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2163 if (IEM_IS_MODRM_REG_MODE(bRm))
2164 {
2165 /*
2166 * XMM128, XMM128.
2167 */
2168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2169 IEM_MC_BEGIN(0, 0);
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2173 IEM_GET_MODRM_RM(pVCpu, bRm));
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 else
2178 {
2179 /*
2180 * XMM128, [mem128].
2181 */
2182 IEM_MC_BEGIN(0, 2);
2183 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2185
2186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2188 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2189 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2190
2191 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2192 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP_AND_FINISH();
2195 IEM_MC_END();
2196 }
2197}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
2209FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * XMM32, XMM32.
2217 */
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_BEGIN(0, 1);
2220 IEM_MC_LOCAL(uint32_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2225 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2226
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * XMM128, [mem32].
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint32_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP_AND_FINISH();
2248 IEM_MC_END();
2249 }
2250}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
2262FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2263{
2264 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2266 if (IEM_IS_MODRM_REG_MODE(bRm))
2267 {
2268 /*
2269 * XMM64, XMM64.
2270 */
2271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2272 IEM_MC_BEGIN(0, 1);
2273 IEM_MC_LOCAL(uint64_t, uSrc);
2274
2275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2278 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2279
2280 IEM_MC_ADVANCE_RIP_AND_FINISH();
2281 IEM_MC_END();
2282 }
2283 else
2284 {
2285 /*
2286 * XMM128, [mem64].
2287 */
2288 IEM_MC_BEGIN(0, 2);
2289 IEM_MC_LOCAL(uint64_t, uSrc);
2290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2291
2292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2295 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2296
2297 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2298 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2299
2300 IEM_MC_ADVANCE_RIP_AND_FINISH();
2301 IEM_MC_END();
2302 }
2303}
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
2315FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2316{
2317 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2319 if (IEM_IS_MODRM_REG_MODE(bRm))
2320 {
2321 /*
2322 * XMM128, XMM128.
2323 */
2324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2325 IEM_MC_BEGIN(0, 0);
2326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2328 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2329 IEM_GET_MODRM_REG(pVCpu, bRm));
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 }
2333 else
2334 {
2335 /*
2336 * [mem128], XMM128.
2337 */
2338 IEM_MC_BEGIN(0, 2);
2339 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2346
2347 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2348 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * XMM128, XMM128.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 0);
2376 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2379 IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * [mem128], XMM128.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2396
2397 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2398 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP_AND_FINISH();
2401 IEM_MC_END();
2402 }
2403}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
2415FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2416{
2417 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if (IEM_IS_MODRM_REG_MODE(bRm))
2420 {
2421 /*
2422 * XMM32, XMM32.
2423 */
2424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2425 IEM_MC_BEGIN(0, 1);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427
2428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2431 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2432
2433 IEM_MC_ADVANCE_RIP_AND_FINISH();
2434 IEM_MC_END();
2435 }
2436 else
2437 {
2438 /*
2439 * [mem32], XMM32.
2440 */
2441 IEM_MC_BEGIN(0, 2);
2442 IEM_MC_LOCAL(uint32_t, uSrc);
2443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2444
2445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2449
2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2451 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2452
2453 IEM_MC_ADVANCE_RIP_AND_FINISH();
2454 IEM_MC_END();
2455 }
2456}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2469{
2470 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472 if (IEM_IS_MODRM_REG_MODE(bRm))
2473 {
2474 /*
2475 * XMM64, XMM64.
2476 */
2477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2478 IEM_MC_BEGIN(0, 1);
2479 IEM_MC_LOCAL(uint64_t, uSrc);
2480
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2484 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2485
2486 IEM_MC_ADVANCE_RIP_AND_FINISH();
2487 IEM_MC_END();
2488 }
2489 else
2490 {
2491 /*
2492 * [mem64], XMM64.
2493 */
2494 IEM_MC_BEGIN(0, 2);
2495 IEM_MC_LOCAL(uint64_t, uSrc);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2502
2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2504 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509}
2510
2511
2512FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2513{
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if (IEM_IS_MODRM_REG_MODE(bRm))
2516 {
2517 /**
2518 * @opcode 0x12
2519 * @opcodesub 11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 */
2527 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2528
2529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2530 IEM_MC_BEGIN(0, 1);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532
2533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2536 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2537
2538 IEM_MC_ADVANCE_RIP_AND_FINISH();
2539 IEM_MC_END();
2540 }
2541 else
2542 {
2543 /**
2544 * @opdone
2545 * @opcode 0x12
2546 * @opcodesub !11 mr/reg
2547 * @oppfx none
2548 * @opcpuid sse
2549 * @opgroup og_sse_simdfp_datamove
2550 * @opxcpttype 5
2551 * @optest op1=1 op2=2 -> op1=2
2552 * @optest op1=0 op2=-42 -> op1=-42
2553 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2554 */
2555 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2556
2557 IEM_MC_BEGIN(0, 2);
2558 IEM_MC_LOCAL(uint64_t, uSrc);
2559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2560
2561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2565
2566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2567 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2568
2569 IEM_MC_ADVANCE_RIP_AND_FINISH();
2570 IEM_MC_END();
2571 }
2572}
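
/*
 * Illustrative encodings for the split above: 0x0f 0x12 0xc1 has mod=3
 * (reg=0, rm=1) and is movhlps xmm0, xmm1, whereas 0x0f 0x12 0x00 has a
 * memory operand and is movlps xmm0, [rax] in 64-bit mode.  One opcode
 * byte, two mnemonics.
 */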
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
2585FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2586{
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_MEM_MODE(bRm))
2589 {
2590 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2591
2592 IEM_MC_BEGIN(0, 2);
2593 IEM_MC_LOCAL(uint64_t, uSrc);
2594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2595
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2600
2601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607
2608 /**
2609 * @opdone
2610 * @opmnemonic ud660f12m3
2611 * @opcode 0x12
2612 * @opcodesub 11 mr/reg
2613 * @oppfx 0x66
2614 * @opunused immediate
2615 * @opcpuid sse
2616 * @optest ->
2617 */
2618 else
2619 return IEMOP_RAISE_INVALID_OPCODE();
2620}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
2632FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2633{
2634 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2636 if (IEM_IS_MODRM_REG_MODE(bRm))
2637 {
2638 /*
2639 * Register, register.
2640 */
2641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2642 IEM_MC_BEGIN(2, 0);
2643 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2644 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2645
2646 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2647 IEM_MC_PREPARE_SSE_USAGE();
2648
2649 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2650 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2651 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2652
2653 IEM_MC_ADVANCE_RIP_AND_FINISH();
2654 IEM_MC_END();
2655 }
2656 else
2657 {
2658 /*
2659 * Register, memory.
2660 */
2661 IEM_MC_BEGIN(2, 2);
2662 IEM_MC_LOCAL(RTUINT128U, uSrc);
2663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2664 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2665 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2666
2667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2669 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2670 IEM_MC_PREPARE_SSE_USAGE();
2671
2672 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2673 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2674 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2675
2676 IEM_MC_ADVANCE_RIP_AND_FINISH();
2677 IEM_MC_END();
2678 }
2679}
2680
2681
2682/**
2683 * @opcode 0x12
2684 * @oppfx 0xf2
2685 * @opcpuid sse3
2686 * @opgroup og_sse3_pcksclr_datamove
2687 * @opxcpttype 5
2688 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2689 * op1=0x22222222111111112222222211111111
2690 */
2691FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2692{
2693 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2695 if (IEM_IS_MODRM_REG_MODE(bRm))
2696 {
2697 /*
2698 * Register, register.
2699 */
2700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2701 IEM_MC_BEGIN(2, 0);
2702 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2703 IEM_MC_ARG(uint64_t, uSrc, 1);
2704
2705 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2706 IEM_MC_PREPARE_SSE_USAGE();
2707
2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2709 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2710 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2711
2712 IEM_MC_ADVANCE_RIP_AND_FINISH();
2713 IEM_MC_END();
2714 }
2715 else
2716 {
2717 /*
2718 * Register, memory.
2719 */
2720 IEM_MC_BEGIN(2, 2);
2721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2722 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2723 IEM_MC_ARG(uint64_t, uSrc, 1);
2724
2725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2727 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2728 IEM_MC_PREPARE_SSE_USAGE();
2729
2730 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2731 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2732 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2733
2734 IEM_MC_ADVANCE_RIP_AND_FINISH();
2735 IEM_MC_END();
2736 }
2737}
2738
2739
2740/**
2741 * @opcode 0x13
2742 * @opcodesub !11 mr/reg
2743 * @oppfx none
2744 * @opcpuid sse
2745 * @opgroup og_sse_simdfp_datamove
2746 * @opxcpttype 5
2747 * @optest op1=1 op2=2 -> op1=2
2748 * @optest op1=0 op2=-42 -> op1=-42
2749 */
2750FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2751{
2752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2753 if (IEM_IS_MODRM_MEM_MODE(bRm))
2754 {
2755 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2756
2757 IEM_MC_BEGIN(0, 2);
2758 IEM_MC_LOCAL(uint64_t, uSrc);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2760
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2764 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2765
2766 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2767 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2768
2769 IEM_MC_ADVANCE_RIP_AND_FINISH();
2770 IEM_MC_END();
2771 }
2772
2773 /**
2774 * @opdone
2775 * @opmnemonic ud0f13m3
2776 * @opcode 0x13
2777 * @opcodesub 11 mr/reg
2778 * @oppfx none
2779 * @opunused immediate
2780 * @opcpuid sse
2781 * @optest ->
2782 */
2783 else
2784 return IEMOP_RAISE_INVALID_OPCODE();
2785}
2786
2787
2788/**
2789 * @opcode 0x13
2790 * @opcodesub !11 mr/reg
2791 * @oppfx 0x66
2792 * @opcpuid sse2
2793 * @opgroup og_sse2_pcksclr_datamove
2794 * @opxcpttype 5
2795 * @optest op1=1 op2=2 -> op1=2
2796 * @optest op1=0 op2=-42 -> op1=-42
2797 */
2798FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2799{
2800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2801 if (IEM_IS_MODRM_MEM_MODE(bRm))
2802 {
2803 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2804 IEM_MC_BEGIN(0, 2);
2805 IEM_MC_LOCAL(uint64_t, uSrc);
2806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2807
2808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2811 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2812
2813 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2814 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2815
2816 IEM_MC_ADVANCE_RIP_AND_FINISH();
2817 IEM_MC_END();
2818 }
2819
2820 /**
2821 * @opdone
2822 * @opmnemonic ud660f13m3
2823 * @opcode 0x13
2824 * @opcodesub 11 mr/reg
2825 * @oppfx 0x66
2826 * @opunused immediate
2827 * @opcpuid sse
2828 * @optest ->
2829 */
2830 else
2831 return IEMOP_RAISE_INVALID_OPCODE();
2832}
2833
2834
2835/**
2836 * @opmnemonic udf30f13
2837 * @opcode 0x13
2838 * @oppfx 0xf3
2839 * @opunused intel-modrm
2840 * @opcpuid sse
2841 * @optest ->
2842 * @opdone
2843 */
2844
2845/**
2846 * @opmnemonic udf20f13
2847 * @opcode 0x13
2848 * @oppfx 0xf2
2849 * @opunused intel-modrm
2850 * @opcpuid sse
2851 * @optest ->
2852 * @opdone
2853 */
2854
2855/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2856FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2857{
2858 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2859 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2860}
2861
2862
2863/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2864FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2865{
2866 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2867 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2868}
2869
2870
2871/**
2872 * @opdone
2873 * @opmnemonic udf30f14
2874 * @opcode 0x14
2875 * @oppfx 0xf3
2876 * @opunused intel-modrm
2877 * @opcpuid sse
2878 * @optest ->
2879 * @opdone
2880 */
2881
2882/**
2883 * @opmnemonic udf20f14
2884 * @opcode 0x14
2885 * @oppfx 0xf2
2886 * @opunused intel-modrm
2887 * @opcpuid sse
2888 * @optest ->
2889 * @opdone
2890 */
2891
2892/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2893FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2894{
2895 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2896 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2897}
2898
2899
2900/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2901FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2902{
2903 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2904 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2905}
2906
2907
2908/* Opcode 0xf3 0x0f 0x15 - invalid */
2909/* Opcode 0xf2 0x0f 0x15 - invalid */
2910
2911/**
2912 * @opdone
2913 * @opmnemonic udf30f15
2914 * @opcode 0x15
2915 * @oppfx 0xf3
2916 * @opunused intel-modrm
2917 * @opcpuid sse
2918 * @optest ->
2919 * @opdone
2920 */
2921
2922/**
2923 * @opmnemonic udf20f15
2924 * @opcode 0x15
2925 * @oppfx 0xf2
2926 * @opunused intel-modrm
2927 * @opcpuid sse
2928 * @optest ->
2929 * @opdone
2930 */
2931
2932FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2933{
2934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2935 if (IEM_IS_MODRM_REG_MODE(bRm))
2936 {
2937 /**
2938 * @opcode 0x16
2939 * @opcodesub 11 mr/reg
2940 * @oppfx none
2941 * @opcpuid sse
2942 * @opgroup og_sse_simdfp_datamove
2943 * @opxcpttype 5
2944 * @optest op1=1 op2=2 -> op1=2
2945 * @optest op1=0 op2=-42 -> op1=-42
2946 */
2947 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2948
2949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2950 IEM_MC_BEGIN(0, 1);
2951 IEM_MC_LOCAL(uint64_t, uSrc);
2952
2953 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2955 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2956 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2957
2958 IEM_MC_ADVANCE_RIP_AND_FINISH();
2959 IEM_MC_END();
2960 }
2961 else
2962 {
2963 /**
2964 * @opdone
2965 * @opcode 0x16
2966 * @opcodesub !11 mr/reg
2967 * @oppfx none
2968 * @opcpuid sse
2969 * @opgroup og_sse_simdfp_datamove
2970 * @opxcpttype 5
2971 * @optest op1=1 op2=2 -> op1=2
2972 * @optest op1=0 op2=-42 -> op1=-42
2973 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2974 */
2975 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2976
2977 IEM_MC_BEGIN(0, 2);
2978 IEM_MC_LOCAL(uint64_t, uSrc);
2979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2980
2981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2983 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2984 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2985
2986 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2987 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2988
2989 IEM_MC_ADVANCE_RIP_AND_FINISH();
2990 IEM_MC_END();
2991 }
2992}
2993
2994
2995/**
2996 * @opcode 0x16
2997 * @opcodesub !11 mr/reg
2998 * @oppfx 0x66
2999 * @opcpuid sse2
3000 * @opgroup og_sse2_pcksclr_datamove
3001 * @opxcpttype 5
3002 * @optest op1=1 op2=2 -> op1=2
3003 * @optest op1=0 op2=-42 -> op1=-42
3004 */
3005FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3006{
3007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3008 if (IEM_IS_MODRM_MEM_MODE(bRm))
3009 {
3010 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3011 IEM_MC_BEGIN(0, 2);
3012 IEM_MC_LOCAL(uint64_t, uSrc);
3013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3014
3015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3017 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3019
3020 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3021 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3022
3023 IEM_MC_ADVANCE_RIP_AND_FINISH();
3024 IEM_MC_END();
3025 }
3026
3027 /**
3028 * @opdone
3029 * @opmnemonic ud660f16m3
3030 * @opcode 0x16
3031 * @opcodesub 11 mr/reg
3032 * @oppfx 0x66
3033 * @opunused immediate
3034 * @opcpuid sse
3035 * @optest ->
3036 */
3037 else
3038 return IEMOP_RAISE_INVALID_OPCODE();
3039}
3040
3041
3042/**
3043 * @opcode 0x16
3044 * @oppfx 0xf3
3045 * @opcpuid sse3
3046 * @opgroup og_sse3_pcksclr_datamove
3047 * @opxcpttype 4
3048 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3049 * op1=0x00000002000000020000000100000001
3050 */
3051FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3052{
3053 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 if (IEM_IS_MODRM_REG_MODE(bRm))
3056 {
3057 /*
3058 * XMM128, XMM128.
3059 */
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_BEGIN(2, 0);
3062 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3063 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3064
3065 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3066 IEM_MC_PREPARE_SSE_USAGE();
3067
3068 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3069 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3071
3072 IEM_MC_ADVANCE_RIP_AND_FINISH();
3073 IEM_MC_END();
3074 }
3075 else
3076 {
3077 /*
3078 * XMM128, [mem128].
3079 */
3080 IEM_MC_BEGIN(2, 2);
3081 IEM_MC_LOCAL(RTUINT128U, uSrc);
3082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3083 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3084 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3085
3086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3088 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3089 IEM_MC_PREPARE_SSE_USAGE();
3090
3091 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3092 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3093 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3094
3095 IEM_MC_ADVANCE_RIP_AND_FINISH();
3096 IEM_MC_END();
3097 }
3098}
3099
3100/**
3101 * @opdone
3102 * @opmnemonic udf20f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/**
3113 * @opcode 0x17
3114 * @opcodesub !11 mr/reg
3115 * @oppfx none
3116 * @opcpuid sse
3117 * @opgroup og_sse_simdfp_datamove
3118 * @opxcpttype 5
3119 * @optest op1=1 op2=2 -> op1=2
3120 * @optest op1=0 op2=-42 -> op1=-42
3121 */
3122FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_MEM_MODE(bRm))
3126 {
3127 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3128
3129 IEM_MC_BEGIN(0, 2);
3130 IEM_MC_LOCAL(uint64_t, uSrc);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3136 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3137
3138 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144
3145 /**
3146 * @opdone
3147 * @opmnemonic ud0f17m3
3148 * @opcode 0x17
3149 * @opcodesub 11 mr/reg
3150 * @oppfx none
3151 * @opunused immediate
3152 * @opcpuid sse
3153 * @optest ->
3154 */
3155 else
3156 return IEMOP_RAISE_INVALID_OPCODE();
3157}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
3170FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 if (IEM_IS_MODRM_MEM_MODE(bRm))
3174 {
3175 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3176
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(uint64_t, uSrc);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3188
3189 IEM_MC_ADVANCE_RIP_AND_FINISH();
3190 IEM_MC_END();
3191 }
3192
3193 /**
3194 * @opdone
3195 * @opmnemonic ud660f17m3
3196 * @opcode 0x17
3197 * @opcodesub 11 mr/reg
3198 * @oppfx 0x66
3199 * @opunused immediate
3200 * @opcpuid sse
3201 * @optest ->
3202 */
3203 else
3204 return IEMOP_RAISE_INVALID_OPCODE();
3205}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
3230/** Opcode 0x0f 0x18. */
3231FNIEMOP_DEF(iemOp_prefetch_Grp16)
3232{
3233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3234 if (IEM_IS_MODRM_MEM_MODE(bRm))
3235 {
3236 switch (IEM_GET_MODRM_REG_8(bRm))
3237 {
3238 case 4: /* Aliased to /0 for the time being according to AMD. */
3239 case 5: /* Aliased to /0 for the time being according to AMD. */
3240 case 6: /* Aliased to /0 for the time being according to AMD. */
3241 case 7: /* Aliased to /0 for the time being according to AMD. */
3242 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3243 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3244 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3245 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3247 }
3248
3249 IEM_MC_BEGIN(0, 1);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3253 /* Currently a NOP. */
3254 NOREF(GCPtrEffSrc);
3255 IEM_MC_ADVANCE_RIP_AND_FINISH();
3256 IEM_MC_END();
3257 }
3258 else
3259 return IEMOP_RAISE_INVALID_OPCODE();
3260}
3261
3262
3263/** Opcode 0x0f 0x19..0x1f. */
3264FNIEMOP_DEF(iemOp_nop_Ev)
3265{
3266 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3268 if (IEM_IS_MODRM_REG_MODE(bRm))
3269 {
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_BEGIN(0, 0);
3272 IEM_MC_ADVANCE_RIP_AND_FINISH();
3273 IEM_MC_END();
3274 }
3275 else
3276 {
3277 IEM_MC_BEGIN(0, 1);
3278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 /* Currently a NOP. */
3282 NOREF(GCPtrEffSrc);
3283 IEM_MC_ADVANCE_RIP_AND_FINISH();
3284 IEM_MC_END();
3285 }
3286}
3287
3288
3289/** Opcode 0x0f 0x20. */
3290FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3291{
3292 /* mod is ignored, as are operand-size overrides. */
3293 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3294 IEMOP_HLP_MIN_386();
3295 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3296 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3297 else
3298 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3299
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3302 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3303 {
3304 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3305 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3306 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3307 iCrReg |= 8;
3308 }
3309 switch (iCrReg)
3310 {
3311 case 0: case 2: case 3: case 4: case 8:
3312 break;
3313 default:
3314 return IEMOP_RAISE_INVALID_OPCODE();
3315 }
3316 IEMOP_HLP_DONE_DECODING();
3317
3318 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3319}
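
/*
 * Illustrative note: the lock-prefix special case above models the
 * alternative CR8 encoding; outside 64-bit mode, f0 0f 20 c0
 * ('lock mov eax, cr0') reads CR8 instead of CR0 on CPUs reporting the
 * feature (fMovCr8In32Bit).  Without the feature the lock prefix makes
 * the instruction #UD, which takes precedence over any #GP.
 */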
3320
3321
3322/** Opcode 0x0f 0x21. */
3323FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3324{
3325 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3326 IEMOP_HLP_MIN_386();
3327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3329 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3330 return IEMOP_RAISE_INVALID_OPCODE();
3331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3332 IEM_GET_MODRM_RM(pVCpu, bRm),
3333 IEM_GET_MODRM_REG_8(bRm));
3334}
3335
3336
3337/** Opcode 0x0f 0x22. */
3338FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3339{
3340 /* mod is ignored, as are operand-size overrides. */
3341 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3342 IEMOP_HLP_MIN_386();
3343 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3344 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3345 else
3346 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3347
3348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3349 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3350 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3351 {
3352 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3353 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3354 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3355 iCrReg |= 8;
3356 }
3357 switch (iCrReg)
3358 {
3359 case 0: case 2: case 3: case 4: case 8:
3360 break;
3361 default:
3362 return IEMOP_RAISE_INVALID_OPCODE();
3363 }
3364 IEMOP_HLP_DONE_DECODING();
3365
3366 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3367}
3368
3369
3370/** Opcode 0x0f 0x23. */
3371FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3372{
3373 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3374 IEMOP_HLP_MIN_386();
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3377 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3378 return IEMOP_RAISE_INVALID_OPCODE();
3379 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3380 IEM_GET_MODRM_REG_8(bRm),
3381 IEM_GET_MODRM_RM(pVCpu, bRm));
3382}
3383
3384
3385/** Opcode 0x0f 0x24. */
3386FNIEMOP_DEF(iemOp_mov_Rd_Td)
3387{
3388 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3389 IEMOP_HLP_MIN_386();
3390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3392 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3393 return IEMOP_RAISE_INVALID_OPCODE();
3394 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3395 IEM_GET_MODRM_RM(pVCpu, bRm),
3396 IEM_GET_MODRM_REG_8(bRm));
3397}
3398
3399
3400/** Opcode 0x0f 0x26. */
3401FNIEMOP_DEF(iemOp_mov_Td_Rd)
3402{
3403 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3404 IEMOP_HLP_MIN_386();
3405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3407 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3408 return IEMOP_RAISE_INVALID_OPCODE();
3409 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3410 IEM_GET_MODRM_REG_8(bRm),
3411 IEM_GET_MODRM_RM(pVCpu, bRm));
3412}
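
/*
 * Background for the two test-register moves above (illustrative): the
 * TRx test registers only existed on the 386 and 486, so 0x0f 0x24 and
 * 0x0f 0x26 raise #UD from the Pentium onwards, which is what the
 * IEMTARGETCPU_PENTIUM checks model.
 */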
3413
3414
3415/**
3416 * @opcode 0x28
3417 * @oppfx none
3418 * @opcpuid sse
3419 * @opgroup og_sse_simdfp_datamove
3420 * @opxcpttype 1
3421 * @optest op1=1 op2=2 -> op1=2
3422 * @optest op1=0 op2=-42 -> op1=-42
3423 */
3424FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3428 if (IEM_IS_MODRM_REG_MODE(bRm))
3429 {
3430 /*
3431 * Register, register.
3432 */
3433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3434 IEM_MC_BEGIN(0, 0);
3435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3437 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3438 IEM_GET_MODRM_RM(pVCpu, bRm));
3439 IEM_MC_ADVANCE_RIP_AND_FINISH();
3440 IEM_MC_END();
3441 }
3442 else
3443 {
3444 /*
3445 * Register, memory.
3446 */
3447 IEM_MC_BEGIN(0, 2);
3448 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3450
3451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3453 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3455
3456 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3457 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3458
3459 IEM_MC_ADVANCE_RIP_AND_FINISH();
3460 IEM_MC_END();
3461 }
3462}
3463
3464/**
3465 * @opcode 0x28
3466 * @oppfx 66
3467 * @opcpuid sse2
3468 * @opgroup og_sse2_pcksclr_datamove
3469 * @opxcpttype 1
3470 * @optest op1=1 op2=2 -> op1=2
3471 * @optest op1=0 op2=-42 -> op1=-42
3472 */
3473FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3474{
3475 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3477 if (IEM_IS_MODRM_REG_MODE(bRm))
3478 {
3479 /*
3480 * Register, register.
3481 */
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_BEGIN(0, 0);
3484 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3486 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3487 IEM_GET_MODRM_RM(pVCpu, bRm));
3488 IEM_MC_ADVANCE_RIP_AND_FINISH();
3489 IEM_MC_END();
3490 }
3491 else
3492 {
3493 /*
3494 * Register, memory.
3495 */
3496 IEM_MC_BEGIN(0, 2);
3497 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3499
3500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3504
3505 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3506 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3507
3508 IEM_MC_ADVANCE_RIP_AND_FINISH();
3509 IEM_MC_END();
3510 }
3511}
3512
3513/* Opcode 0xf3 0x0f 0x28 - invalid */
3514/* Opcode 0xf2 0x0f 0x28 - invalid */
3515
3516/**
3517 * @opcode 0x29
3518 * @oppfx none
3519 * @opcpuid sse
3520 * @opgroup og_sse_simdfp_datamove
3521 * @opxcpttype 1
3522 * @optest op1=1 op2=2 -> op1=2
3523 * @optest op1=0 op2=-42 -> op1=-42
3524 */
3525FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3526{
3527 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3529 if (IEM_IS_MODRM_REG_MODE(bRm))
3530 {
3531 /*
3532 * Register, register.
3533 */
3534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3535 IEM_MC_BEGIN(0, 0);
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3538 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3539 IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_ADVANCE_RIP_AND_FINISH();
3541 IEM_MC_END();
3542 }
3543 else
3544 {
3545 /*
3546 * Memory, register.
3547 */
3548 IEM_MC_BEGIN(0, 2);
3549 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3551
3552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3554 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3555 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3556
3557 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3558 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3559
3560 IEM_MC_ADVANCE_RIP_AND_FINISH();
3561 IEM_MC_END();
3562 }
3563}
3564
3565/**
3566 * @opcode 0x29
3567 * @oppfx 66
3568 * @opcpuid sse2
3569 * @opgroup og_sse2_pcksclr_datamove
3570 * @opxcpttype 1
3571 * @optest op1=1 op2=2 -> op1=2
3572 * @optest op1=0 op2=-42 -> op1=-42
3573 */
3574FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3575{
3576 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3578 if (IEM_IS_MODRM_REG_MODE(bRm))
3579 {
3580 /*
3581 * Register, register.
3582 */
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584 IEM_MC_BEGIN(0, 0);
3585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3587 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3588 IEM_GET_MODRM_REG(pVCpu, bRm));
3589 IEM_MC_ADVANCE_RIP_AND_FINISH();
3590 IEM_MC_END();
3591 }
3592 else
3593 {
3594 /*
3595 * Memory, register.
3596 */
3597 IEM_MC_BEGIN(0, 2);
3598 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3600
3601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3605
3606 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3607 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3608
3609 IEM_MC_ADVANCE_RIP_AND_FINISH();
3610 IEM_MC_END();
3611 }
3612}
3613
3614/* Opcode 0xf3 0x0f 0x29 - invalid */
3615/* Opcode 0xf2 0x0f 0x29 - invalid */
3616
3617
3618/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3619FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3620{
3621 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3623 if (IEM_IS_MODRM_REG_MODE(bRm))
3624 {
3625 /*
3626 * XMM, MMX
3627 */
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629
3630 IEM_MC_BEGIN(3, 1);
3631 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3632 IEM_MC_LOCAL(X86XMMREG, Dst);
3633 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3634 IEM_MC_ARG(uint64_t, u64Src, 2);
3635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3636 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3637 IEM_MC_PREPARE_FPU_USAGE();
3638 IEM_MC_FPU_TO_MMX_MODE();
3639
3640 IEM_MC_REF_MXCSR(pfMxcsr);
3641 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3642 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3643
3644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3645 IEM_MC_IF_MXCSR_XCPT_PENDING()
3646 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3647 IEM_MC_ELSE()
3648 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3649 IEM_MC_ENDIF();
3650
3651 IEM_MC_ADVANCE_RIP_AND_FINISH();
3652 IEM_MC_END();
3653 }
3654 else
3655 {
3656 /*
3657 * XMM, [mem64]
3658 */
3659 IEM_MC_BEGIN(3, 2);
3660 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3661 IEM_MC_LOCAL(X86XMMREG, Dst);
3662 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3663 IEM_MC_ARG(uint64_t, u64Src, 2);
3664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3665
3666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3669 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3670 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3671
3672 IEM_MC_PREPARE_FPU_USAGE();
3673 IEM_MC_FPU_TO_MMX_MODE();
3674 IEM_MC_REF_MXCSR(pfMxcsr);
3675 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3676 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3677 IEM_MC_IF_MXCSR_XCPT_PENDING()
3678 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3679 IEM_MC_ELSE()
3680 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3681 IEM_MC_ENDIF();
3682
3683 IEM_MC_ADVANCE_RIP_AND_FINISH();
3684 IEM_MC_END();
3685 }
3686}
3687
3688
3689/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3690FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3691{
3692 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3694 if (IEM_IS_MODRM_REG_MODE(bRm))
3695 {
3696 /*
3697 * XMM, MMX
3698 */
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700
3701 IEM_MC_BEGIN(3, 1);
3702 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3703 IEM_MC_LOCAL(X86XMMREG, Dst);
3704 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3705 IEM_MC_ARG(uint64_t, u64Src, 2);
3706 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3707 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3708 IEM_MC_PREPARE_FPU_USAGE();
3709 IEM_MC_FPU_TO_MMX_MODE();
3710
3711 IEM_MC_REF_MXCSR(pfMxcsr);
3712 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3713
3714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3715 IEM_MC_IF_MXCSR_XCPT_PENDING()
3716 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3717 IEM_MC_ELSE()
3718 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3719 IEM_MC_ENDIF();
3720
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /*
3727 * XMM, [mem64]
3728 */
3729 IEM_MC_BEGIN(3, 2);
3730 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3731 IEM_MC_LOCAL(X86XMMREG, Dst);
3732 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3733 IEM_MC_ARG(uint64_t, u64Src, 2);
3734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3735
3736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3738 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3740 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3741
3742 /* Doesn't cause a transition to MMX mode. */
3743 IEM_MC_PREPARE_SSE_USAGE();
3744 IEM_MC_REF_MXCSR(pfMxcsr);
3745
3746 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3747 IEM_MC_IF_MXCSR_XCPT_PENDING()
3748 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3749 IEM_MC_ELSE()
3750 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3751 IEM_MC_ENDIF();
3752
3753 IEM_MC_ADVANCE_RIP_AND_FINISH();
3754 IEM_MC_END();
3755 }
3756}
3757
3758
3759/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3760FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3761{
3762 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3763
3764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
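/* REX.W selects between a 64-bit and a 32-bit integer source operand. */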
3765 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3766 {
3767 if (IEM_IS_MODRM_REG_MODE(bRm))
3768 {
3769 /* XMM, greg64 */
3770 IEM_MC_BEGIN(3, 2);
3771 IEM_MC_LOCAL(uint32_t, fMxcsr);
3772 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3773 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3774 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3775 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3776
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3779 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3780
3781 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3782 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3783 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3784 IEM_MC_IF_MXCSR_XCPT_PENDING()
3785 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3786 IEM_MC_ELSE()
3787 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3788 IEM_MC_ENDIF();
3789
3790 IEM_MC_ADVANCE_RIP_AND_FINISH();
3791 IEM_MC_END();
3792 }
3793 else
3794 {
3795 /* XMM, [mem64] */
3796 IEM_MC_BEGIN(3, 4);
3797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3798 IEM_MC_LOCAL(uint32_t, fMxcsr);
3799 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3800 IEM_MC_LOCAL(int64_t, i64Src);
3801 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3802 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3803 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3804
3805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3808 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3809
3810 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3811 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3812 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3813 IEM_MC_IF_MXCSR_XCPT_PENDING()
3814 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3815 IEM_MC_ELSE()
3816 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3817 IEM_MC_ENDIF();
3818
3819 IEM_MC_ADVANCE_RIP_AND_FINISH();
3820 IEM_MC_END();
3821 }
3822 }
3823 else
3824 {
3825 if (IEM_IS_MODRM_REG_MODE(bRm))
3826 {
3827 /* XMM, greg32 */
3828 IEM_MC_BEGIN(3, 2);
3829 IEM_MC_LOCAL(uint32_t, fMxcsr);
3830 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3831 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3832 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3833 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3834
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3837 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3838
3839 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3840 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3841 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3842 IEM_MC_IF_MXCSR_XCPT_PENDING()
3843 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3844 IEM_MC_ELSE()
3845 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3846 IEM_MC_ENDIF();
3847
3848 IEM_MC_ADVANCE_RIP_AND_FINISH();
3849 IEM_MC_END();
3850 }
3851 else
3852 {
3853 /* XMM, [mem32] */
3854 IEM_MC_BEGIN(3, 4);
3855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3856 IEM_MC_LOCAL(uint32_t, fMxcsr);
3857 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3858 IEM_MC_LOCAL(int32_t, i32Src);
3859 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3860 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3861 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3862
3863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3865 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3866 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3867
3868 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3869 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3870 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3871 IEM_MC_IF_MXCSR_XCPT_PENDING()
3872 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3873 IEM_MC_ELSE()
3874 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3875 IEM_MC_ENDIF();
3876
3877 IEM_MC_ADVANCE_RIP_AND_FINISH();
3878 IEM_MC_END();
3879 }
3880 }
3881}
3882
3883
3884/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3885FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3886{
3887 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3888
3889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3890 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3891 {
3892 if (IEM_IS_MODRM_REG_MODE(bRm))
3893 {
3894 /* XMM, greg64 */
3895 IEM_MC_BEGIN(3, 2);
3896 IEM_MC_LOCAL(uint32_t, fMxcsr);
3897 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3898 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3899 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3900 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3901
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3904 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3905
3906 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3907 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3908 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3909 IEM_MC_IF_MXCSR_XCPT_PENDING()
3910 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3911 IEM_MC_ELSE()
3912 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3913 IEM_MC_ENDIF();
3914
3915 IEM_MC_ADVANCE_RIP_AND_FINISH();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 /* XMM, [mem64] */
3921 IEM_MC_BEGIN(3, 4);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923 IEM_MC_LOCAL(uint32_t, fMxcsr);
3924 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3925 IEM_MC_LOCAL(int64_t, i64Src);
3926 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3927 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3928 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3929
3930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3933 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3934
3935 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3936 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3937 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3938 IEM_MC_IF_MXCSR_XCPT_PENDING()
3939 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3940 IEM_MC_ELSE()
3941 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3942 IEM_MC_ENDIF();
3943
3944 IEM_MC_ADVANCE_RIP_AND_FINISH();
3945 IEM_MC_END();
3946 }
3947 }
3948 else
3949 {
3950 if (IEM_IS_MODRM_REG_MODE(bRm))
3951 {
3952 /* XMM, greg32 */
3953 IEM_MC_BEGIN(3, 2);
3954 IEM_MC_LOCAL(uint32_t, fMxcsr);
3955 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3956 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3957 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3958 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3959
3960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3961 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3962 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3963
3964 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3965 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3966 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3967 IEM_MC_IF_MXCSR_XCPT_PENDING()
3968 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3969 IEM_MC_ELSE()
3970 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3971 IEM_MC_ENDIF();
3972
3973 IEM_MC_ADVANCE_RIP_AND_FINISH();
3974 IEM_MC_END();
3975 }
3976 else
3977 {
3978 /* XMM, [mem32] */
3979 IEM_MC_BEGIN(3, 4);
3980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3981 IEM_MC_LOCAL(uint32_t, fMxcsr);
3982 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3983 IEM_MC_LOCAL(int32_t, i32Src);
3984 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3985 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3986 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3987
3988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3990 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3991 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3992
3993 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3994 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3995 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3996 IEM_MC_IF_MXCSR_XCPT_PENDING()
3997 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3998 IEM_MC_ELSE()
3999 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4000 IEM_MC_ENDIF();
4001
4002 IEM_MC_ADVANCE_RIP_AND_FINISH();
4003 IEM_MC_END();
4004 }
4005 }
4006}
4007
4008
4009/**
4010 * @opcode 0x2b
4011 * @opcodesub !11 mr/reg
4012 * @oppfx none
4013 * @opcpuid sse
4014 * @opgroup og_sse1_cachect
4015 * @opxcpttype 1
4016 * @optest op1=1 op2=2 -> op1=2
4017 * @optest op1=0 op2=-42 -> op1=-42
4018 */
4019FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4020{
4021 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4023 if (IEM_IS_MODRM_MEM_MODE(bRm))
4024 {
4025 /*
4026 * memory, register.
4027 */
4028 IEM_MC_BEGIN(0, 2);
4029 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4031
4032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4035 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4036
4037 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4038 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
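/* Note: the non-temporal hint is ignored here; a plain aligned store yields the same result, the hint only affects caching/ordering behaviour. */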
4039
4040 IEM_MC_ADVANCE_RIP_AND_FINISH();
4041 IEM_MC_END();
4042 }
4043 /* The register, register encoding is invalid. */
4044 else
4045 return IEMOP_RAISE_INVALID_OPCODE();
4046}
4047
4048/**
4049 * @opcode 0x2b
4050 * @opcodesub !11 mr/reg
4051 * @oppfx 0x66
4052 * @opcpuid sse2
4053 * @opgroup og_sse2_cachect
4054 * @opxcpttype 1
4055 * @optest op1=1 op2=2 -> op1=2
4056 * @optest op1=0 op2=-42 -> op1=-42
4057 */
4058FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4059{
4060 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4062 if (IEM_IS_MODRM_MEM_MODE(bRm))
4063 {
4064 /*
4065 * memory, register.
4066 */
4067 IEM_MC_BEGIN(0, 2);
4068 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4070
4071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4075
4076 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4077 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4078
4079 IEM_MC_ADVANCE_RIP_AND_FINISH();
4080 IEM_MC_END();
4081 }
4082 /* The register, register encoding is invalid. */
4083 else
4084 return IEMOP_RAISE_INVALID_OPCODE();
4085}
4086/* Opcode 0xf3 0x0f 0x2b - invalid */
4087/* Opcode 0xf2 0x0f 0x2b - invalid */
4088
4089
4090/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4091FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4092{
4093 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
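/* The truncating ('tt') form always rounds toward zero, ignoring MXCSR.RC. */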
4094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4095 if (IEM_IS_MODRM_REG_MODE(bRm))
4096 {
4097 /*
4098 * Register, register.
4099 */
4100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4101
4102 IEM_MC_BEGIN(3, 1);
4103 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4104 IEM_MC_LOCAL(uint64_t, u64Dst);
4105 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4106 IEM_MC_ARG(uint64_t, u64Src, 2);
4107 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); /* CVTTPS2PI is an SSE1 instruction. */
4108 IEM_MC_PREPARE_FPU_USAGE();
4109 IEM_MC_FPU_TO_MMX_MODE();
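/* Like all instructions writing an MMX register, this transitions the x87 unit to MMX mode. */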
4110
4111 IEM_MC_REF_MXCSR(pfMxcsr);
4112 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4113
4114 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4115 IEM_MC_IF_MXCSR_XCPT_PENDING()
4116 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4117 IEM_MC_ELSE()
4118 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4119 IEM_MC_ENDIF();
4120
4121 IEM_MC_ADVANCE_RIP_AND_FINISH();
4122 IEM_MC_END();
4123 }
4124 else
4125 {
4126 /*
4127 * Register, memory.
4128 */
4129 IEM_MC_BEGIN(3, 2);
4130 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4131 IEM_MC_LOCAL(uint64_t, u64Dst);
4132 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4133 IEM_MC_ARG(uint64_t, u64Src, 2);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_FPU_USAGE();
4142 IEM_MC_FPU_TO_MMX_MODE();
4143 IEM_MC_REF_MXCSR(pfMxcsr);
4144
4145 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4146 IEM_MC_IF_MXCSR_XCPT_PENDING()
4147 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4148 IEM_MC_ELSE()
4149 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4150 IEM_MC_ENDIF();
4151
4152 IEM_MC_ADVANCE_RIP_AND_FINISH();
4153 IEM_MC_END();
4154 }
4155}
4156
4157
4158/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4159FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4160{
4161 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4163 if (IEM_IS_MODRM_REG_MODE(bRm))
4164 {
4165 /*
4166 * Register, register.
4167 */
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4169
4170 IEM_MC_BEGIN(3, 1);
4171 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4172 IEM_MC_LOCAL(uint64_t, u64Dst);
4173 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4174 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4175 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4176 IEM_MC_PREPARE_FPU_USAGE();
4177 IEM_MC_FPU_TO_MMX_MODE();
4178
4179 IEM_MC_REF_MXCSR(pfMxcsr);
4180 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4181
4182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4183 IEM_MC_IF_MXCSR_XCPT_PENDING()
4184 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4185 IEM_MC_ELSE()
4186 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4187 IEM_MC_ENDIF();
4188
4189 IEM_MC_ADVANCE_RIP_AND_FINISH();
4190 IEM_MC_END();
4191 }
4192 else
4193 {
4194 /*
4195 * Register, memory.
4196 */
4197 IEM_MC_BEGIN(3, 3);
4198 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4199 IEM_MC_LOCAL(uint64_t, u64Dst);
4200 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4201 IEM_MC_LOCAL(X86XMMREG, uSrc);
4202 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4204
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4208 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
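/* Unlike the scalar conversions, the full 128-bit memory operand must be 16-byte aligned or #GP is raised. */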
4209
4210 IEM_MC_PREPARE_FPU_USAGE();
4211 IEM_MC_FPU_TO_MMX_MODE();
4212
4213 IEM_MC_REF_MXCSR(pfMxcsr);
4214
4215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4216 IEM_MC_IF_MXCSR_XCPT_PENDING()
4217 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4218 IEM_MC_ELSE()
4219 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4220 IEM_MC_ENDIF();
4221
4222 IEM_MC_ADVANCE_RIP_AND_FINISH();
4223 IEM_MC_END();
4224 }
4225}
4226
4227
4228/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4229FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4230{
4231 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4232
4233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4235 {
4236 if (IEM_IS_MODRM_REG_MODE(bRm))
4237 {
4238 /* greg64, XMM */
4239 IEM_MC_BEGIN(3, 2);
4240 IEM_MC_LOCAL(uint32_t, fMxcsr);
4241 IEM_MC_LOCAL(int64_t, i64Dst);
4242 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4243 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4244 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4245
4246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4248 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4249
4250 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4251 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4252 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4253 IEM_MC_IF_MXCSR_XCPT_PENDING()
4254 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4255 IEM_MC_ELSE()
4256 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4257 IEM_MC_ENDIF();
4258
4259 IEM_MC_ADVANCE_RIP_AND_FINISH();
4260 IEM_MC_END();
4261 }
4262 else
4263 {
4264 /* greg64, [mem32] */
4265 IEM_MC_BEGIN(3, 4);
4266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4267 IEM_MC_LOCAL(uint32_t, fMxcsr);
4268 IEM_MC_LOCAL(int64_t, i64Dst);
4269 IEM_MC_LOCAL(uint32_t, u32Src);
4270 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4271 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4272 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4273
4274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4276 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4277 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4278
4279 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4280 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4281 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4282 IEM_MC_IF_MXCSR_XCPT_PENDING()
4283 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4284 IEM_MC_ELSE()
4285 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4286 IEM_MC_ENDIF();
4287
4288 IEM_MC_ADVANCE_RIP_AND_FINISH();
4289 IEM_MC_END();
4290 }
4291 }
4292 else
4293 {
4294 if (IEM_IS_MODRM_REG_MODE(bRm))
4295 {
4296 /* greg32, XMM */
4297 IEM_MC_BEGIN(3, 2);
4298 IEM_MC_LOCAL(uint32_t, fMxcsr);
4299 IEM_MC_LOCAL(int32_t, i32Dst);
4300 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4301 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4302 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4303
4304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4305 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4306 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4307
4308 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4309 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4310 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4311 IEM_MC_IF_MXCSR_XCPT_PENDING()
4312 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4313 IEM_MC_ELSE()
4314 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4315 IEM_MC_ENDIF();
4316
4317 IEM_MC_ADVANCE_RIP_AND_FINISH();
4318 IEM_MC_END();
4319 }
4320 else
4321 {
4322 /* greg32, [mem32] */
4323 IEM_MC_BEGIN(3, 4);
4324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4325 IEM_MC_LOCAL(uint32_t, fMxcsr);
4326 IEM_MC_LOCAL(int32_t, i32Dst);
4327 IEM_MC_LOCAL(uint32_t, u32Src);
4328 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4329 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4330 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4331
4332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4335 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4336
4337 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4338 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4339 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4340 IEM_MC_IF_MXCSR_XCPT_PENDING()
4341 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4342 IEM_MC_ELSE()
4343 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4344 IEM_MC_ENDIF();
4345
4346 IEM_MC_ADVANCE_RIP_AND_FINISH();
4347 IEM_MC_END();
4348 }
4349 }
4350}
4351
4352
4353/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4354FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4355{
4356 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4357
4358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4360 {
4361 if (IEM_IS_MODRM_REG_MODE(bRm))
4362 {
4363 /* greg64, XMM */
4364 IEM_MC_BEGIN(3, 2);
4365 IEM_MC_LOCAL(uint32_t, fMxcsr);
4366 IEM_MC_LOCAL(int64_t, i64Dst);
4367 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4368 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4369 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4370
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4373 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4374
4375 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4376 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4377 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4378 IEM_MC_IF_MXCSR_XCPT_PENDING()
4379 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4380 IEM_MC_ELSE()
4381 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4382 IEM_MC_ENDIF();
4383
4384 IEM_MC_ADVANCE_RIP_AND_FINISH();
4385 IEM_MC_END();
4386 }
4387 else
4388 {
4389 /* greg64, [mem64] */
4390 IEM_MC_BEGIN(3, 4);
4391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4392 IEM_MC_LOCAL(uint32_t, fMxcsr);
4393 IEM_MC_LOCAL(int64_t, i64Dst);
4394 IEM_MC_LOCAL(uint64_t, u64Src);
4395 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4396 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4397 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4398
4399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4401 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4402 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4403
4404 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4405 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4406 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4407 IEM_MC_IF_MXCSR_XCPT_PENDING()
4408 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4409 IEM_MC_ELSE()
4410 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4411 IEM_MC_ENDIF();
4412
4413 IEM_MC_ADVANCE_RIP_AND_FINISH();
4414 IEM_MC_END();
4415 }
4416 }
4417 else
4418 {
4419 if (IEM_IS_MODRM_REG_MODE(bRm))
4420 {
4421 /* greg32, XMM */
4422 IEM_MC_BEGIN(3, 2);
4423 IEM_MC_LOCAL(uint32_t, fMxcsr);
4424 IEM_MC_LOCAL(int32_t, i32Dst);
4425 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4426 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4427 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4428
4429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4431 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4432
4433 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4434 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4435 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4436 IEM_MC_IF_MXCSR_XCPT_PENDING()
4437 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4438 IEM_MC_ELSE()
4439 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4440 IEM_MC_ENDIF();
4441
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 }
4445 else
4446 {
4447 /* greg32, [mem32] */
4448 IEM_MC_BEGIN(3, 4);
4449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4450 IEM_MC_LOCAL(uint32_t, fMxcsr);
4451 IEM_MC_LOCAL(int32_t, i32Dst);
4452 IEM_MC_LOCAL(uint64_t, u64Src);
4453 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4454 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4455 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4456
4457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4460 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4461
4462 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4463 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4464 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4465 IEM_MC_IF_MXCSR_XCPT_PENDING()
4466 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4467 IEM_MC_ELSE()
4468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4469 IEM_MC_ENDIF();
4470
4471 IEM_MC_ADVANCE_RIP_AND_FINISH();
4472 IEM_MC_END();
4473 }
4474 }
4475}
4476
4477
4478/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4479FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4480{
4481 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
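/* Non-truncating form: the conversion rounds according to MXCSR.RC. */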
4482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4483 if (IEM_IS_MODRM_REG_MODE(bRm))
4484 {
4485 /*
4486 * Register, register.
4487 */
4488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4489
4490 IEM_MC_BEGIN(3, 1);
4491 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4492 IEM_MC_LOCAL(uint64_t, u64Dst);
4493 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4494 IEM_MC_ARG(uint64_t, u64Src, 2);
4495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); /* CVTPS2PI is an SSE1 instruction. */
4496 IEM_MC_PREPARE_FPU_USAGE();
4497 IEM_MC_FPU_TO_MMX_MODE();
4498
4499 IEM_MC_REF_MXCSR(pfMxcsr);
4500 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4501
4502 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4503 IEM_MC_IF_MXCSR_XCPT_PENDING()
4504 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4505 IEM_MC_ELSE()
4506 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4507 IEM_MC_ENDIF();
4508
4509 IEM_MC_ADVANCE_RIP_AND_FINISH();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 /*
4515 * Register, memory.
4516 */
4517 IEM_MC_BEGIN(3, 2);
4518 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4519 IEM_MC_LOCAL(uint64_t, u64Dst);
4520 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4521 IEM_MC_ARG(uint64_t, u64Src, 2);
4522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4523
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4527 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4528
4529 IEM_MC_PREPARE_FPU_USAGE();
4530 IEM_MC_FPU_TO_MMX_MODE();
4531 IEM_MC_REF_MXCSR(pfMxcsr);
4532
4533 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4534 IEM_MC_IF_MXCSR_XCPT_PENDING()
4535 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4536 IEM_MC_ELSE()
4537 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4538 IEM_MC_ENDIF();
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543}
4544
4545
4546/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4547FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4548{
4549 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4551 if (IEM_IS_MODRM_REG_MODE(bRm))
4552 {
4553 /*
4554 * Register, register.
4555 */
4556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4557
4558 IEM_MC_BEGIN(3, 1);
4559 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4560 IEM_MC_LOCAL(uint64_t, u64Dst);
4561 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4562 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4563 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4564 IEM_MC_PREPARE_FPU_USAGE();
4565 IEM_MC_FPU_TO_MMX_MODE();
4566
4567 IEM_MC_REF_MXCSR(pfMxcsr);
4568 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4569
4570 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4571 IEM_MC_IF_MXCSR_XCPT_PENDING()
4572 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4573 IEM_MC_ELSE()
4574 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4575 IEM_MC_ENDIF();
4576
4577 IEM_MC_ADVANCE_RIP_AND_FINISH();
4578 IEM_MC_END();
4579 }
4580 else
4581 {
4582 /*
4583 * Register, memory.
4584 */
4585 IEM_MC_BEGIN(3, 3);
4586 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4587 IEM_MC_LOCAL(uint64_t, u64Dst);
4588 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4589 IEM_MC_LOCAL(X86XMMREG, uSrc);
4590 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4592
4593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4596 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4597
4598 IEM_MC_PREPARE_FPU_USAGE();
4599 IEM_MC_FPU_TO_MMX_MODE();
4600
4601 IEM_MC_REF_MXCSR(pfMxcsr);
4602
4603 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4604 IEM_MC_IF_MXCSR_XCPT_PENDING()
4605 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4606 IEM_MC_ELSE()
4607 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4608 IEM_MC_ENDIF();
4609
4610 IEM_MC_ADVANCE_RIP_AND_FINISH();
4611 IEM_MC_END();
4612 }
4613}
4614
4615
4616/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4617FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4618{
4619 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4620
4621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4622 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4623 {
4624 if (IEM_IS_MODRM_REG_MODE(bRm))
4625 {
4626 /* greg64, XMM */
4627 IEM_MC_BEGIN(3, 2);
4628 IEM_MC_LOCAL(uint32_t, fMxcsr);
4629 IEM_MC_LOCAL(int64_t, i64Dst);
4630 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4631 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4632 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4633
4634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4636 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4637
4638 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4639 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4640 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4641 IEM_MC_IF_MXCSR_XCPT_PENDING()
4642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4643 IEM_MC_ELSE()
4644 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4645 IEM_MC_ENDIF();
4646
4647 IEM_MC_ADVANCE_RIP_AND_FINISH();
4648 IEM_MC_END();
4649 }
4650 else
4651 {
4652 /* greg64, [mem32] */
4653 IEM_MC_BEGIN(3, 4);
4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4655 IEM_MC_LOCAL(uint32_t, fMxcsr);
4656 IEM_MC_LOCAL(int64_t, i64Dst);
4657 IEM_MC_LOCAL(uint32_t, u32Src);
4658 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4659 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4660 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4661
4662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4665 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4666
4667 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4668 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4669 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4670 IEM_MC_IF_MXCSR_XCPT_PENDING()
4671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4672 IEM_MC_ELSE()
4673 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4674 IEM_MC_ENDIF();
4675
4676 IEM_MC_ADVANCE_RIP_AND_FINISH();
4677 IEM_MC_END();
4678 }
4679 }
4680 else
4681 {
4682 if (IEM_IS_MODRM_REG_MODE(bRm))
4683 {
4684 /* greg32, XMM */
4685 IEM_MC_BEGIN(3, 2);
4686 IEM_MC_LOCAL(uint32_t, fMxcsr);
4687 IEM_MC_LOCAL(int32_t, i32Dst);
4688 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4689 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4690 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4691
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4694 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4695
4696 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4697 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4698 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4699 IEM_MC_IF_MXCSR_XCPT_PENDING()
4700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4701 IEM_MC_ELSE()
4702 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4703 IEM_MC_ENDIF();
4704
4705 IEM_MC_ADVANCE_RIP_AND_FINISH();
4706 IEM_MC_END();
4707 }
4708 else
4709 {
4710 /* greg32, [mem32] */
4711 IEM_MC_BEGIN(3, 4);
4712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4713 IEM_MC_LOCAL(uint32_t, fMxcsr);
4714 IEM_MC_LOCAL(int32_t, i32Dst);
4715 IEM_MC_LOCAL(uint32_t, u32Src);
4716 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4717 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4718 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4719
4720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4722 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4723 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4724
4725 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4726 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4727 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4728 IEM_MC_IF_MXCSR_XCPT_PENDING()
4729 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4730 IEM_MC_ELSE()
4731 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4732 IEM_MC_ENDIF();
4733
4734 IEM_MC_ADVANCE_RIP_AND_FINISH();
4735 IEM_MC_END();
4736 }
4737 }
4738}
4739
4740
4741/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4742FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4743{
4744 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4745
4746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4748 {
4749 if (IEM_IS_MODRM_REG_MODE(bRm))
4750 {
4751 /* greg64, XMM */
4752 IEM_MC_BEGIN(3, 2);
4753 IEM_MC_LOCAL(uint32_t, fMxcsr);
4754 IEM_MC_LOCAL(int64_t, i64Dst);
4755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4756 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4757 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4758
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4762
4763 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4765 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4766 IEM_MC_IF_MXCSR_XCPT_PENDING()
4767 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4768 IEM_MC_ELSE()
4769 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4770 IEM_MC_ENDIF();
4771
4772 IEM_MC_ADVANCE_RIP_AND_FINISH();
4773 IEM_MC_END();
4774 }
4775 else
4776 {
4777 /* greg64, [mem64] */
4778 IEM_MC_BEGIN(3, 4);
4779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4780 IEM_MC_LOCAL(uint32_t, fMxcsr);
4781 IEM_MC_LOCAL(int64_t, i64Dst);
4782 IEM_MC_LOCAL(uint64_t, u64Src);
4783 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4784 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4785 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4786
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4791
4792 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4795 IEM_MC_IF_MXCSR_XCPT_PENDING()
4796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4797 IEM_MC_ELSE()
4798 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4799 IEM_MC_ENDIF();
4800
4801 IEM_MC_ADVANCE_RIP_AND_FINISH();
4802 IEM_MC_END();
4803 }
4804 }
4805 else
4806 {
4807 if (IEM_IS_MODRM_REG_MODE(bRm))
4808 {
4809 /* greg32, XMM */
4810 IEM_MC_BEGIN(3, 2);
4811 IEM_MC_LOCAL(uint32_t, fMxcsr);
4812 IEM_MC_LOCAL(int32_t, i32Dst);
4813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4814 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4815 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4816
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4820
4821 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4824 IEM_MC_IF_MXCSR_XCPT_PENDING()
4825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4826 IEM_MC_ELSE()
4827 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4828 IEM_MC_ENDIF();
4829
4830 IEM_MC_ADVANCE_RIP_AND_FINISH();
4831 IEM_MC_END();
4832 }
4833 else
4834 {
4835 /* greg32, [mem64] */
4836 IEM_MC_BEGIN(3, 4);
4837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4838 IEM_MC_LOCAL(uint32_t, fMxcsr);
4839 IEM_MC_LOCAL(int32_t, i32Dst);
4840 IEM_MC_LOCAL(uint64_t, u64Src);
4841 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4842 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4843 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4844
4845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4847 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4849
4850 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4853 IEM_MC_IF_MXCSR_XCPT_PENDING()
4854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4855 IEM_MC_ELSE()
4856 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4857 IEM_MC_ENDIF();
4858
4859 IEM_MC_ADVANCE_RIP_AND_FINISH();
4860 IEM_MC_END();
4861 }
4862 }
4863}
4864
4865
4866/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4867FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4868{
4869 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4871 if (IEM_IS_MODRM_REG_MODE(bRm))
4872 {
4873 /*
4874 * Register, register.
4875 */
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877 IEM_MC_BEGIN(4, 1);
4878 IEM_MC_LOCAL(uint32_t, fEFlags);
4879 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4880 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4881 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4882 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4883 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4884 IEM_MC_PREPARE_SSE_USAGE();
4885 IEM_MC_FETCH_EFLAGS(fEFlags);
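/* The worker sets ZF/PF/CF from the (unordered) compare result and clears OF/SF/AF. */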
4886 IEM_MC_REF_MXCSR(pfMxcsr);
4887 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4888 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4889 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4890 IEM_MC_IF_MXCSR_XCPT_PENDING()
4891 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4892 IEM_MC_ELSE()
4893 IEM_MC_COMMIT_EFLAGS(fEFlags);
4894 IEM_MC_ENDIF();
4895
4896 IEM_MC_ADVANCE_RIP_AND_FINISH();
4897 IEM_MC_END();
4898 }
4899 else
4900 {
4901 /*
4902 * Register, memory.
4903 */
4904 IEM_MC_BEGIN(4, 3);
4905 IEM_MC_LOCAL(uint32_t, fEFlags);
4906 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4907 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4908 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4909 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4910 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4912
4913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4915 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4916 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4917
4918 IEM_MC_PREPARE_SSE_USAGE();
4919 IEM_MC_FETCH_EFLAGS(fEFlags);
4920 IEM_MC_REF_MXCSR(pfMxcsr);
4921 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4922 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4923 IEM_MC_IF_MXCSR_XCPT_PENDING()
4924 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4925 IEM_MC_ELSE()
4926 IEM_MC_COMMIT_EFLAGS(fEFlags);
4927 IEM_MC_ENDIF();
4928
4929 IEM_MC_ADVANCE_RIP_AND_FINISH();
4930 IEM_MC_END();
4931 }
4932}
4933
4934
4935/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4936FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4937{
4938 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4940 if (IEM_IS_MODRM_REG_MODE(bRm))
4941 {
4942 /*
4943 * Register, register.
4944 */
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_BEGIN(4, 1);
4947 IEM_MC_LOCAL(uint32_t, fEFlags);
4948 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4949 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4950 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4951 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4952 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4953 IEM_MC_PREPARE_SSE_USAGE();
4954 IEM_MC_FETCH_EFLAGS(fEFlags);
4955 IEM_MC_REF_MXCSR(pfMxcsr);
4956 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4957 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4958 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4959 IEM_MC_IF_MXCSR_XCPT_PENDING()
4960 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4961 IEM_MC_ELSE()
4962 IEM_MC_COMMIT_EFLAGS(fEFlags);
4963 IEM_MC_ENDIF();
4964
4965 IEM_MC_ADVANCE_RIP_AND_FINISH();
4966 IEM_MC_END();
4967 }
4968 else
4969 {
4970 /*
4971 * Register, memory.
4972 */
4973 IEM_MC_BEGIN(4, 3);
4974 IEM_MC_LOCAL(uint32_t, fEFlags);
4975 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4976 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4977 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4978 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4979 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4981
4982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4985 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4986
4987 IEM_MC_PREPARE_SSE_USAGE();
4988 IEM_MC_FETCH_EFLAGS(fEFlags);
4989 IEM_MC_REF_MXCSR(pfMxcsr);
4990 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4991 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4992 IEM_MC_IF_MXCSR_XCPT_PENDING()
4993 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4994 IEM_MC_ELSE()
4995 IEM_MC_COMMIT_EFLAGS(fEFlags);
4996 IEM_MC_ENDIF();
4997
4998 IEM_MC_ADVANCE_RIP_AND_FINISH();
4999 IEM_MC_END();
5000 }
5001}
5002
5003
5004/* Opcode 0xf3 0x0f 0x2e - invalid */
5005/* Opcode 0xf2 0x0f 0x2e - invalid */
5006
5007
5008/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5009FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5010{
5011 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
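/* COMISS differs from UCOMISS only in signalling #IA on QNaN operands; the EFLAGS results are identical. */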
5012 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5013 if (IEM_IS_MODRM_REG_MODE(bRm))
5014 {
5015 /*
5016 * Register, register.
5017 */
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_BEGIN(4, 1);
5020 IEM_MC_LOCAL(uint32_t, fEFlags);
5021 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5022 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5023 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5024 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5026 IEM_MC_PREPARE_SSE_USAGE();
5027 IEM_MC_FETCH_EFLAGS(fEFlags);
5028 IEM_MC_REF_MXCSR(pfMxcsr);
5029 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5030 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5032 IEM_MC_IF_MXCSR_XCPT_PENDING()
5033 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5034 IEM_MC_ELSE()
5035 IEM_MC_COMMIT_EFLAGS(fEFlags);
5036 IEM_MC_ENDIF();
5037
5038 IEM_MC_ADVANCE_RIP_AND_FINISH();
5039 IEM_MC_END();
5040 }
5041 else
5042 {
5043 /*
5044 * Register, memory.
5045 */
5046 IEM_MC_BEGIN(4, 3);
5047 IEM_MC_LOCAL(uint32_t, fEFlags);
5048 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5049 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5050 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5051 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5052 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5054
5055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5058 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5059
5060 IEM_MC_PREPARE_SSE_USAGE();
5061 IEM_MC_FETCH_EFLAGS(fEFlags);
5062 IEM_MC_REF_MXCSR(pfMxcsr);
5063 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5064 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5065 IEM_MC_IF_MXCSR_XCPT_PENDING()
5066 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5067 IEM_MC_ELSE()
5068 IEM_MC_COMMIT_EFLAGS(fEFlags);
5069 IEM_MC_ENDIF();
5070
5071 IEM_MC_ADVANCE_RIP_AND_FINISH();
5072 IEM_MC_END();
5073 }
5074}
5075
5076
5077/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5078FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5079{
5080 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5082 if (IEM_IS_MODRM_REG_MODE(bRm))
5083 {
5084 /*
5085 * Register, register.
5086 */
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_BEGIN(4, 1);
5089 IEM_MC_LOCAL(uint32_t, fEFlags);
5090 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5091 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5092 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5093 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5095 IEM_MC_PREPARE_SSE_USAGE();
5096 IEM_MC_FETCH_EFLAGS(fEFlags);
5097 IEM_MC_REF_MXCSR(pfMxcsr);
5098 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5099 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5101 IEM_MC_IF_MXCSR_XCPT_PENDING()
5102 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5103 IEM_MC_ELSE()
5104 IEM_MC_COMMIT_EFLAGS(fEFlags);
5105 IEM_MC_ENDIF();
5106
5107 IEM_MC_ADVANCE_RIP_AND_FINISH();
5108 IEM_MC_END();
5109 }
5110 else
5111 {
5112 /*
5113 * Register, memory.
5114 */
5115 IEM_MC_BEGIN(4, 3);
5116 IEM_MC_LOCAL(uint32_t, fEFlags);
5117 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5118 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5119 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5120 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5121 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5123
5124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5126 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5127 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5128
5129 IEM_MC_PREPARE_SSE_USAGE();
5130 IEM_MC_FETCH_EFLAGS(fEFlags);
5131 IEM_MC_REF_MXCSR(pfMxcsr);
5132 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5134 IEM_MC_IF_MXCSR_XCPT_PENDING()
5135 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5136 IEM_MC_ELSE()
5137 IEM_MC_COMMIT_EFLAGS(fEFlags);
5138 IEM_MC_ENDIF();
5139
5140 IEM_MC_ADVANCE_RIP_AND_FINISH();
5141 IEM_MC_END();
5142 }
5143}
5144
5145
5146/* Opcode 0xf3 0x0f 0x2f - invalid */
5147/* Opcode 0xf2 0x0f 0x2f - invalid */
5148
5149/** Opcode 0x0f 0x30. */
5150FNIEMOP_DEF(iemOp_wrmsr)
5151{
5152 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5155}
5156
5157
5158/** Opcode 0x0f 0x31. */
5159FNIEMOP_DEF(iemOp_rdtsc)
5160{
5161 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5164}
5165
5166
5167/** Opcode 0x0f 0x32. */
5168FNIEMOP_DEF(iemOp_rdmsr)
5169{
5170 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5173}
5174
5175
5176/** Opcode 0x0f 0x33. */
5177FNIEMOP_DEF(iemOp_rdpmc)
5178{
5179 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5182}
5183
5184
5185/** Opcode 0x0f 0x34. */
5186FNIEMOP_DEF(iemOp_sysenter)
5187{
5188 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5191}
5192
5193/** Opcode 0x0f 0x35. */
5194FNIEMOP_DEF(iemOp_sysexit)
5195{
5196 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
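/* The effective operand size (REX.W) selects between returning to 32-bit and 64-bit code. */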
5198 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5199}
5200
5201/** Opcode 0x0f 0x37. */
5202FNIEMOP_STUB(iemOp_getsec);
5203
5204
5205/** Opcode 0x0f 0x38. */
5206FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5207{
5208#ifdef IEM_WITH_THREE_0F_38
5209 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
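/* Four table entries per opcode byte, selected by the active SIMD prefix (none/66h/F3h/F2h). */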
5210 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5211#else
5212 IEMOP_BITCH_ABOUT_STUB();
5213 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5214#endif
5215}
5216
5217
5218/** Opcode 0x0f 0x3a. */
5219FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5220{
5221#ifdef IEM_WITH_THREE_0F_3A
5222 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5223 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5224#else
5225 IEMOP_BITCH_ABOUT_STUB();
5226 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5227#endif
5228}
5229
5230
5231/**
5232 * Implements a conditional move.
5233 *
5234 * Wish there were an obvious way to do this that would let us share and reduce
5235 * code bloat.
5236 *
5237 * @param a_Cnd The conditional "microcode" operation.
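* (e.g. IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) for CMOVE).
* @note A 32-bit CMOV clears the high half of the destination register even
* when the condition is false (see IEM_MC_CLEAR_HIGH_GREG_U64 in the
* else branches below), matching the AMD64 zero-extension rule.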
5238 */
5239#define CMOV_X(a_Cnd) \
5240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5241 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5242 { \
5243 switch (pVCpu->iem.s.enmEffOpSize) \
5244 { \
5245 case IEMMODE_16BIT: \
5246 IEM_MC_BEGIN(0, 1); \
5247 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5248 a_Cnd { \
5249 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5250 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5251 } IEM_MC_ENDIF(); \
5252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5253 IEM_MC_END(); \
5254 break; \
5255 \
5256 case IEMMODE_32BIT: \
5257 IEM_MC_BEGIN(0, 1); \
5258 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5259 a_Cnd { \
5260 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5261 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5262 } IEM_MC_ELSE() { \
5263 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5264 } IEM_MC_ENDIF(); \
5265 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5266 IEM_MC_END(); \
5267 break; \
5268 \
5269 case IEMMODE_64BIT: \
5270 IEM_MC_BEGIN(0, 1); \
5271 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5272 a_Cnd { \
5273 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5274 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5275 } IEM_MC_ENDIF(); \
5276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5277 IEM_MC_END(); \
5278 break; \
5279 \
5280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5281 } \
5282 } \
5283 else \
5284 { \
5285 switch (pVCpu->iem.s.enmEffOpSize) \
5286 { \
5287 case IEMMODE_16BIT: \
5288 IEM_MC_BEGIN(0, 2); \
5289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5290 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5292 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5293 a_Cnd { \
5294 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5295 } IEM_MC_ENDIF(); \
5296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5297 IEM_MC_END(); \
5298 break; \
5299 \
5300 case IEMMODE_32BIT: \
5301 IEM_MC_BEGIN(0, 2); \
5302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5303 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5305 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5306 a_Cnd { \
5307 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5308 } IEM_MC_ELSE() { \
5309 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5310 } IEM_MC_ENDIF(); \
5311 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5312 IEM_MC_END(); \
5313 break; \
5314 \
5315 case IEMMODE_64BIT: \
5316 IEM_MC_BEGIN(0, 2); \
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5318 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5320 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5321 a_Cnd { \
5322 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5323 } IEM_MC_ENDIF(); \
5324 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5325 IEM_MC_END(); \
5326 break; \
5327 \
5328 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5329 } \
5330 } do {} while (0)
5331
5332
5333
5334/** Opcode 0x0f 0x40. */
5335FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5336{
5337 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5338 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5339}
5340
5341
5342/** Opcode 0x0f 0x41. */
5343FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5344{
5345 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5346 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5347}
5348
5349
5350/** Opcode 0x0f 0x42. */
5351FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5352{
5353 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5354 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5355}
5356
5357
5358/** Opcode 0x0f 0x43. */
5359FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5360{
5361 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5362 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5363}
5364
5365
5366/** Opcode 0x0f 0x44. */
5367FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5368{
5369 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5370 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5371}
5372
5373
5374/** Opcode 0x0f 0x45. */
5375FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5376{
5377 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5378 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5379}
5380
5381
5382/** Opcode 0x0f 0x46. */
5383FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5384{
5385 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5386 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5387}
5388
5389
5390/** Opcode 0x0f 0x47. */
5391FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5392{
5393 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5394 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5395}
5396
5397
5398/** Opcode 0x0f 0x48. */
5399FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5400{
5401 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5402 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5403}
5404
5405
5406/** Opcode 0x0f 0x49. */
5407FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5408{
5409 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5410 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5411}
5412
5413
5414/** Opcode 0x0f 0x4a. */
5415FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5416{
5417 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5418 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5419}
5420
5421
5422/** Opcode 0x0f 0x4b. */
5423FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5424{
5425 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5426 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5427}
5428
5429
5430/** Opcode 0x0f 0x4c. */
5431FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5432{
5433 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5434 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5435}
5436
5437
5438/** Opcode 0x0f 0x4d. */
5439FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5440{
5441 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5442 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5443}
5444
5445
5446/** Opcode 0x0f 0x4e. */
5447FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5448{
5449 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5450 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5451}
5452
5453
5454/** Opcode 0x0f 0x4f. */
5455FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5456{
5457 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5458 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5459}
5460
5461#undef CMOV_X
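
/* A minimal sketch (not decoder code; helper name hypothetical) of the 32-bit
 * CMOVcc semantics the IEM_MC_ELSE() branches above model: in 64-bit mode the
 * destination is written even when the condition is false, so the high dword
 * is always cleared. */
#if 0 /* example only */
static uint64_t cmov32Example(bool fCond, uint64_t uDst64, uint32_t uSrc32)
{
    return fCond ? uSrc32 : (uint32_t)uDst64; /* either way, zero-extended */
}
#endif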
5462
5463/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5464FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5465{
5466 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if (IEM_IS_MODRM_REG_MODE(bRm))
5469 {
5470 /*
5471 * Register, register.
5472 */
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474 IEM_MC_BEGIN(2, 1);
5475 IEM_MC_LOCAL(uint8_t, u8Dst);
5476 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5477 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5478 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5479 IEM_MC_PREPARE_SSE_USAGE();
5480 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5482 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5483 IEM_MC_ADVANCE_RIP_AND_FINISH();
5484 IEM_MC_END();
5485 }
5486 /* No memory operand. */
5487 else
5488 return IEMOP_RAISE_INVALID_OPCODE();
5489}
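
/* Sketch of what the movmskps worker computes (hypothetical helper, not the
 * real one in IEMAllAImpl*): the sign bit of each packed single is gathered
 * into bits 0..3 of the destination, which is then stored zero-extended. */
#if 0 /* example only */
static uint8_t movmskpsExample(PCRTUINT128U puSrc)
{
    uint8_t fMask = 0;
    for (unsigned i = 0; i < 4; i++)
        fMask |= (uint8_t)((puSrc->au32[i] >> 31) << i); /* bit 31 = sign bit */
    return fMask;
}
#endif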
5490
5491
5492/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5493FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5494{
5495 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5497 if (IEM_IS_MODRM_REG_MODE(bRm))
5498 {
5499 /*
5500 * Register, register.
5501 */
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503 IEM_MC_BEGIN(2, 1);
5504 IEM_MC_LOCAL(uint8_t, u8Dst);
5505 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5506 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5507 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5508 IEM_MC_PREPARE_SSE_USAGE();
5509 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5510 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5511 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5512 IEM_MC_ADVANCE_RIP_AND_FINISH();
5513 IEM_MC_END();
5514 }
5515 /* No memory operand. */
5516 else
5517 return IEMOP_RAISE_INVALID_OPCODE();
5519}
5520
5521
5522/* Opcode 0xf3 0x0f 0x50 - invalid */
5523/* Opcode 0xf2 0x0f 0x50 - invalid */
5524
5525
5526/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5527FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5528{
5529 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5531}
5532
5533
5534/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5535FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5536{
5537 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5538 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5539}
5540
5541
5542/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5543FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5544{
5545 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5546 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5547}
5548
5549
5550/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5551FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5552{
5553 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5554 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5555}
5556
5557
5558/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5559FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5560/* Opcode 0x66 0x0f 0x52 - invalid */
5561/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5562FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5563/* Opcode 0xf2 0x0f 0x52 - invalid */
5564
5565/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5566FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5567/* Opcode 0x66 0x0f 0x53 - invalid */
5568/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5569FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5570/* Opcode 0xf2 0x0f 0x53 - invalid */
5571
5572
5573/** Opcode 0x0f 0x54 - andps Vps, Wps */
5574FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5575{
5576 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5578}
5579
5580
5581/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5582FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5583{
5584 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5586}
5587
5588
5589/* Opcode 0xf3 0x0f 0x54 - invalid */
5590/* Opcode 0xf2 0x0f 0x54 - invalid */
5591
5592
5593/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5594FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5595{
5596 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5597 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5598}
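
/* Note which operand is complemented: ANDNPS/PANDN computes ~dst & src, not
 * dst & ~src. A sketch over the two 64-bit halves of the register: */
#if 0 /* example only */
puDst->au64[0] = ~puDst->au64[0] & puSrc->au64[0];
puDst->au64[1] = ~puDst->au64[1] & puSrc->au64[1];
#endif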
5599
5600
5601/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5602FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5603{
5604 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5605 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5606}
5607
5608
5609/* Opcode 0xf3 0x0f 0x55 - invalid */
5610/* Opcode 0xf2 0x0f 0x55 - invalid */
5611
5612
5613/** Opcode 0x0f 0x56 - orps Vps, Wps */
5614FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5615{
5616 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5617 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5618}
5619
5620
5621/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5622FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5623{
5624 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5625 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5626}
5627
5628
5629/* Opcode 0xf3 0x0f 0x56 - invalid */
5630/* Opcode 0xf2 0x0f 0x56 - invalid */
5631
5632
5633/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5634FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5635{
5636 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5642FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5643{
5644 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5646}
5647
5648
5649/* Opcode 0xf3 0x0f 0x57 - invalid */
5650/* Opcode 0xf2 0x0f 0x57 - invalid */
5651
5652/** Opcode 0x0f 0x58 - addps Vps, Wps */
5653FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5654{
5655 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5656 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5657}
5658
5659
5660/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5661FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5662{
5663 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5664 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5665}
5666
5667
5668/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5669FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5670{
5671 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5672 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5673}
5674
5675
5676/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5677FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5678{
5679 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5680 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5681}
5682
5683
5684/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5685FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5686{
5687 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5688 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5689}
5690
5691
5692/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5693FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5694{
5695 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5696 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5697}
5698
5699
5700/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5701FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5702{
5703 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5705}
5706
5707
5708/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5709FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5710{
5711 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5713}
5714
5715
5716/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5717FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5718{
5719 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5721}
5722
5723
5724/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5725FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5726{
5727 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5728 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5729}
5730
5731
5732/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5733FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5734{
5735 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5736 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5737}
5738
5739
5740/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5741FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5742{
5743 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5744 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5745}
5746
5747
5748/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5749FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5750{
5751 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5752 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5753}
5754
5755
5756/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5757FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5758{
5759 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5761}
5762
5763
5764/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5765FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5766{
5767 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5768 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5769}
5770
5771
5772/* Opcode 0xf2 0x0f 0x5b - invalid */
5773
5774
5775/** Opcode 0x0f 0x5c - subps Vps, Wps */
5776FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5777{
5778 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5779 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5780}
5781
5782
5783/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5784FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5785{
5786 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5787 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5788}
5789
5790
5791/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5792FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5793{
5794 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5795 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5796}
5797
5798
5799/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5800FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5801{
5802 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5803 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5804}
5805
5806
5807/** Opcode 0x0f 0x5d - minps Vps, Wps */
5808FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5809{
5810 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5811 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5812}
5813
5814
5815/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5816FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5817{
5818 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5819 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5820}
5821
5822
5823/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5824FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5825{
5826 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5827 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5828}
5829
5830
5831/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5832FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5833{
5834 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5835 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5836}
5837
5838
5839/** Opcode 0x0f 0x5e - divps Vps, Wps */
5840FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5841{
5842 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5843 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5844}
5845
5846
5847/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5848FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5849{
5850 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5851 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5852}
5853
5854
5855/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5856FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5857{
5858 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5859 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5860}
5861
5862
5863/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5864FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5865{
5866 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5867 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5868}
5869
5870
5871/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5872FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5873{
5874 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5875 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5876}
5877
5878
5879/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5880FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5881{
5882 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5883 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5884}
5885
5886
5887/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5888FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5889{
5890 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5891 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5892}
5893
5894
5895/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5896FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5897{
5898 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5899 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5900}
5901
5902
5903/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5904FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5905{
5906 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5907 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5908}
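
/* Interleave semantics of the punpckl* family, sketched for the byte/MMX case
 * (hypothetical helper): the low halves of both operands are merged, with the
 * destination supplying the even result bytes and the source the odd ones. */
#if 0 /* example only */
static uint64_t punpcklbwExample(uint64_t uDst, uint64_t uSrc)
{
    RTUINT64U uIn1, uIn2, uOut;
    uIn1.u = uDst;
    uIn2.u = uSrc;
    for (unsigned i = 0; i < 4; i++)
    {
        uOut.au8[i * 2]     = uIn1.au8[i]; /* destination -> even bytes */
        uOut.au8[i * 2 + 1] = uIn2.au8[i]; /* source      -> odd bytes  */
    }
    return uOut.u;
}
#endif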
5909
5910
5911/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5912FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5913{
5914 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5915 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5916}
5917
5918
5919/* Opcode 0xf3 0x0f 0x60 - invalid */
5920
5921
5922/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5923FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5924{
5925 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5926 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5927 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5928}
5929
5930
5931/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5932FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5933{
5934 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5936}
5937
5938
5939/* Opcode 0xf3 0x0f 0x61 - invalid */
5940
5941
5942/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5943FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5944{
5945 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5946 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5947}
5948
5949
5950/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5951FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5952{
5953 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5954 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5955}
5956
5957
5958/* Opcode 0xf3 0x0f 0x62 - invalid */
5959
5960
5961
5962/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5963FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5964{
5965 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5966 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5967}
5968
5969
5970/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5971FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5972{
5973 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5974 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5975}
5976
5977
5978/* Opcode 0xf3 0x0f 0x63 - invalid */
5979
5980
5981/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5982FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5983{
5984 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5985 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5986}
5987
5988
5989/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5990FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5991{
5992 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5993 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5994}
5995
5996
5997/* Opcode 0xf3 0x0f 0x64 - invalid */
5998
5999
6000/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6001FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6002{
6003 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6004 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6005}
6006
6007
6008/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6009FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6010{
6011 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6012 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6013}
6014
6015
6016/* Opcode 0xf3 0x0f 0x65 - invalid */
6017
6018
6019/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6020FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6021{
6022 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6023 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6024}
6025
6026
6027/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6028FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6029{
6030 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6031 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6032}
6033
6034
6035/* Opcode 0xf3 0x0f 0x66 - invalid */
6036
6037
6038/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6039FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6040{
6041 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6042 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6043}
6044
6045
6046/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6047FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6048{
6049 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6050 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6051}
6052
6053
6054/* Opcode 0xf3 0x0f 0x67 - invalid */
6055
6056
6057/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6058 * @note Intel and AMD both use Qd for the second parameter; however, they
6059 * both list it as an mmX/mem64 operand and Intel describes it as being
6060 * loaded as a qword, so it should be Qq, shouldn't it? */
6061FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6062{
6063 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6064 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6065}
6066
6067
6068/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6069FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6070{
6071 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6072 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6073}
6074
6075
6076/* Opcode 0xf3 0x0f 0x68 - invalid */
6077
6078
6079/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6080 * @note Intel and AMD both use Qd for the second parameter; however, they
6081 * both list it as an mmX/mem64 operand and Intel describes it as being
6082 * loaded as a qword, so it should be Qq, shouldn't it? */
6083FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6084{
6085 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6086 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6087}
6088
6089
6090/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6091FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6092{
6093 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6094 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6096}
6097
6098
6099/* Opcode 0xf3 0x0f 0x69 - invalid */
6100
6101
6102/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6103 * @note Intel and AMD both use Qd for the second parameter; however, they
6104 * both list it as an mmX/mem64 operand and Intel describes it as being
6105 * loaded as a qword, so it should be Qq, shouldn't it? */
6106FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6107{
6108 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6109 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6110}
6111
6112
6113/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6114FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6115{
6116 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6117 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6118}
6119
6120
6121/* Opcode 0xf3 0x0f 0x6a - invalid */
6122
6123
6124/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6125FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6126{
6127 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6128 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6129}
6130
6131
6132/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6133FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6134{
6135 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6136 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6137}
6138
6139
6140/* Opcode 0xf3 0x0f 0x6b - invalid */
6141
6142
6143/* Opcode 0x0f 0x6c - invalid */
6144
6145
6146/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6147FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6148{
6149 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6150 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6151}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6c - invalid */
6155/* Opcode 0xf2 0x0f 0x6c - invalid */
6156
6157
6158/* Opcode 0x0f 0x6d - invalid */
6159
6160
6161/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6162FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6163{
6164 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6166}
6167
6168
6169/* Opcode 0xf3 0x0f 0x6d - invalid */
6170
6171
6172FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6173{
6174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6175 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6176 {
6177 /**
6178 * @opcode 0x6e
6179 * @opcodesub rex.w=1
6180 * @oppfx none
6181 * @opcpuid mmx
6182 * @opgroup og_mmx_datamove
6183 * @opxcpttype 5
6184 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6185 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6186 */
6187 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6188 if (IEM_IS_MODRM_REG_MODE(bRm))
6189 {
6190 /* MMX, greg64 */
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 IEM_MC_BEGIN(0, 1);
6193 IEM_MC_LOCAL(uint64_t, u64Tmp);
6194
6195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6196 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6197 IEM_MC_FPU_TO_MMX_MODE();
6198
6199 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6200 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6201
6202 IEM_MC_ADVANCE_RIP_AND_FINISH();
6203 IEM_MC_END();
6204 }
6205 else
6206 {
6207 /* MMX, [mem64] */
6208 IEM_MC_BEGIN(0, 2);
6209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6210 IEM_MC_LOCAL(uint64_t, u64Tmp);
6211
6212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6215 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6216 IEM_MC_FPU_TO_MMX_MODE();
6217
6218 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6219 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6220
6221 IEM_MC_ADVANCE_RIP_AND_FINISH();
6222 IEM_MC_END();
6223 }
6224 }
6225 else
6226 {
6227 /**
6228 * @opdone
6229 * @opcode 0x6e
6230 * @opcodesub rex.w=0
6231 * @oppfx none
6232 * @opcpuid mmx
6233 * @opgroup og_mmx_datamove
6234 * @opxcpttype 5
6235 * @opfunction iemOp_movd_q_Pd_Ey
6236 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6237 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6238 */
6239 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6240 if (IEM_IS_MODRM_REG_MODE(bRm))
6241 {
6242 /* MMX, greg32 */
6243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6244 IEM_MC_BEGIN(0, 1);
6245 IEM_MC_LOCAL(uint32_t, u32Tmp);
6246
6247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6248 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6249 IEM_MC_FPU_TO_MMX_MODE();
6250
6251 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6252 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6253
6254 IEM_MC_ADVANCE_RIP_AND_FINISH();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /* MMX, [mem32] */
6260 IEM_MC_BEGIN(0, 2);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6262 IEM_MC_LOCAL(uint32_t, u32Tmp);
6263
6264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6267 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6271 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6272
6273 IEM_MC_ADVANCE_RIP_AND_FINISH();
6274 IEM_MC_END();
6275 }
6276 }
6277}
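
/* Sketch of the rex.w=0 path above (hypothetical helper): a 32-bit MOVD into
 * an MMX register zero-extends through bit 63, which is exactly what
 * IEM_MC_STORE_MREG_U32_ZX_U64 expresses. */
#if 0 /* example only */
static uint64_t movdToMmxExample(uint32_t u32Src)
{
    return (uint64_t)u32Src; /* bits 63:32 become zero */
}
#endif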
6278
6279FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6280{
6281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6283 {
6284 /**
6285 * @opcode 0x6e
6286 * @opcodesub rex.w=1
6287 * @oppfx 0x66
6288 * @opcpuid sse2
6289 * @opgroup og_sse2_simdint_datamove
6290 * @opxcpttype 5
6291 * @optest 64-bit / op1=1 op2=2 -> op1=2
6292 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6293 */
6294 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6295 if (IEM_IS_MODRM_REG_MODE(bRm))
6296 {
6297 /* XMM, greg64 */
6298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6299 IEM_MC_BEGIN(0, 1);
6300 IEM_MC_LOCAL(uint64_t, u64Tmp);
6301
6302 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6303 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6304
6305 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6306 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6307
6308 IEM_MC_ADVANCE_RIP_AND_FINISH();
6309 IEM_MC_END();
6310 }
6311 else
6312 {
6313 /* XMM, [mem64] */
6314 IEM_MC_BEGIN(0, 2);
6315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6316 IEM_MC_LOCAL(uint64_t, u64Tmp);
6317
6318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6320 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6322
6323 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6324 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6325
6326 IEM_MC_ADVANCE_RIP_AND_FINISH();
6327 IEM_MC_END();
6328 }
6329 }
6330 else
6331 {
6332 /**
6333 * @opdone
6334 * @opcode 0x6e
6335 * @opcodesub rex.w=0
6336 * @oppfx 0x66
6337 * @opcpuid sse2
6338 * @opgroup og_sse2_simdint_datamove
6339 * @opxcpttype 5
6340 * @opfunction iemOp_movd_q_Vy_Ey
6341 * @optest op1=1 op2=2 -> op1=2
6342 * @optest op1=0 op2=-42 -> op1=-42
6343 */
6344 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6345 if (IEM_IS_MODRM_REG_MODE(bRm))
6346 {
6347 /* XMM, greg32 */
6348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6349 IEM_MC_BEGIN(0, 1);
6350 IEM_MC_LOCAL(uint32_t, u32Tmp);
6351
6352 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6353 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6354
6355 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6356 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6357
6358 IEM_MC_ADVANCE_RIP_AND_FINISH();
6359 IEM_MC_END();
6360 }
6361 else
6362 {
6363 /* XMM, [mem32] */
6364 IEM_MC_BEGIN(0, 2);
6365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6366 IEM_MC_LOCAL(uint32_t, u32Tmp);
6367
6368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6370 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6371 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6372
6373 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6374 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6375
6376 IEM_MC_ADVANCE_RIP_AND_FINISH();
6377 IEM_MC_END();
6378 }
6379 }
6380}
6381
6382/* Opcode 0xf3 0x0f 0x6e - invalid */
6383
6384
6385/**
6386 * @opcode 0x6f
6387 * @oppfx none
6388 * @opcpuid mmx
6389 * @opgroup og_mmx_datamove
6390 * @opxcpttype 5
6391 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6392 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6393 */
6394FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6395{
6396 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6398 if (IEM_IS_MODRM_REG_MODE(bRm))
6399 {
6400 /*
6401 * Register, register.
6402 */
6403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6404 IEM_MC_BEGIN(0, 1);
6405 IEM_MC_LOCAL(uint64_t, u64Tmp);
6406
6407 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6408 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6409 IEM_MC_FPU_TO_MMX_MODE();
6410
6411 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6412 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6413
6414 IEM_MC_ADVANCE_RIP_AND_FINISH();
6415 IEM_MC_END();
6416 }
6417 else
6418 {
6419 /*
6420 * Register, memory.
6421 */
6422 IEM_MC_BEGIN(0, 2);
6423 IEM_MC_LOCAL(uint64_t, u64Tmp);
6424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6425
6426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6428 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6429 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6430 IEM_MC_FPU_TO_MMX_MODE();
6431
6432 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6433 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6434
6435 IEM_MC_ADVANCE_RIP_AND_FINISH();
6436 IEM_MC_END();
6437 }
6438}
6439
6440/**
6441 * @opcode 0x6f
6442 * @oppfx 0x66
6443 * @opcpuid sse2
6444 * @opgroup og_sse2_simdint_datamove
6445 * @opxcpttype 1
6446 * @optest op1=1 op2=2 -> op1=2
6447 * @optest op1=0 op2=-42 -> op1=-42
6448 */
6449FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6450{
6451 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6453 if (IEM_IS_MODRM_REG_MODE(bRm))
6454 {
6455 /*
6456 * Register, register.
6457 */
6458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6459 IEM_MC_BEGIN(0, 0);
6460
6461 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6463
6464 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6465 IEM_GET_MODRM_RM(pVCpu, bRm));
6466 IEM_MC_ADVANCE_RIP_AND_FINISH();
6467 IEM_MC_END();
6468 }
6469 else
6470 {
6471 /*
6472 * Register, memory.
6473 */
6474 IEM_MC_BEGIN(0, 2);
6475 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6477
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6482
6483 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6484 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6485
6486 IEM_MC_ADVANCE_RIP_AND_FINISH();
6487 IEM_MC_END();
6488 }
6489}
6490
6491/**
6492 * @opcode 0x6f
6493 * @oppfx 0xf3
6494 * @opcpuid sse2
6495 * @opgroup og_sse2_simdint_datamove
6496 * @opxcpttype 4UA
6497 * @optest op1=1 op2=2 -> op1=2
6498 * @optest op1=0 op2=-42 -> op1=-42
6499 */
6500FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6501{
6502 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6504 if (IEM_IS_MODRM_REG_MODE(bRm))
6505 {
6506 /*
6507 * Register, register.
6508 */
6509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6510 IEM_MC_BEGIN(0, 0);
6511 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6512 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6513 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6514 IEM_GET_MODRM_RM(pVCpu, bRm));
6515 IEM_MC_ADVANCE_RIP_AND_FINISH();
6516 IEM_MC_END();
6517 }
6518 else
6519 {
6520 /*
6521 * Register, memory.
6522 */
6523 IEM_MC_BEGIN(0, 2);
6524 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6526
6527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6531 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6532 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6533
6534 IEM_MC_ADVANCE_RIP_AND_FINISH();
6535 IEM_MC_END();
6536 }
6537}
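
/* The only difference between the 0x66 (movdqa) and 0xf3 (movdqu) forms above
 * is the aligned fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE faults on a misaligned
 * operand while the plain fetch does not. A sketch of that alignment check: */
#if 0 /* example only */
if ((GCPtrEffSrc & 15) != 0) /* movdqa requires 16-byte alignment */
    return iemRaiseGeneralProtectionFault0(pVCpu);
#endif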
6538
6539
6540/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6541FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6542{
6543 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6545 if (IEM_IS_MODRM_REG_MODE(bRm))
6546 {
6547 /*
6548 * Register, register.
6549 */
6550 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6552
6553 IEM_MC_BEGIN(3, 0);
6554 IEM_MC_ARG(uint64_t *, pDst, 0);
6555 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6556 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6557 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6558 IEM_MC_PREPARE_FPU_USAGE();
6559 IEM_MC_FPU_TO_MMX_MODE();
6560
6561 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6562 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6563 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6564 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6565
6566 IEM_MC_ADVANCE_RIP_AND_FINISH();
6567 IEM_MC_END();
6568 }
6569 else
6570 {
6571 /*
6572 * Register, memory.
6573 */
6574 IEM_MC_BEGIN(3, 2);
6575 IEM_MC_ARG(uint64_t *, pDst, 0);
6576 IEM_MC_LOCAL(uint64_t, uSrc);
6577 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6579
6580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6581 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6582 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6584 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6585 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6586
6587 IEM_MC_PREPARE_FPU_USAGE();
6588 IEM_MC_FPU_TO_MMX_MODE();
6589
6590 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6591 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6592 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6593
6594 IEM_MC_ADVANCE_RIP_AND_FINISH();
6595 IEM_MC_END();
6596 }
6597}
6598
6599
6600/**
6601 * Common worker for SSE2 instructions on the forms:
6602 * pshufd xmm1, xmm2/mem128, imm8
6603 * pshufhw xmm1, xmm2/mem128, imm8
6604 * pshuflw xmm1, xmm2/mem128, imm8
6605 *
6606 * Proper alignment of the 128-bit operand is enforced.
6607 * Exceptions type 4. SSE2 cpuid checks.
6608 */
6609FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6610{
6611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6612 if (IEM_IS_MODRM_REG_MODE(bRm))
6613 {
6614 /*
6615 * Register, register.
6616 */
6617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6619
6620 IEM_MC_BEGIN(3, 0);
6621 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6622 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6623 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6624 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6625 IEM_MC_PREPARE_SSE_USAGE();
6626 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6627 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6628 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6629 IEM_MC_ADVANCE_RIP_AND_FINISH();
6630 IEM_MC_END();
6631 }
6632 else
6633 {
6634 /*
6635 * Register, memory.
6636 */
6637 IEM_MC_BEGIN(3, 2);
6638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6639 IEM_MC_LOCAL(RTUINT128U, uSrc);
6640 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6642
6643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6644 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6645 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6648
6649 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6650 IEM_MC_PREPARE_SSE_USAGE();
6651 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6652 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6653
6654 IEM_MC_ADVANCE_RIP_AND_FINISH();
6655 IEM_MC_END();
6656 }
6657}
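
/* A minimal sketch of the dword-select semantics this worker family wraps
 * (helper name hypothetical): each 2-bit immediate field picks one source
 * dword; pshuflw/pshufhw apply the same idea to the low/high four words. */
#if 0 /* example only */
static void pshufdExample(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first: dst and src may alias */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
}
#endif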
6658
6659
6660/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6661FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6662{
6663 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6664 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6665}
6666
6667
6668/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6669FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6670{
6671 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6672 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6673}
6674
6675
6676/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6677FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6678{
6679 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6680 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6681}
6682
6683
6684/**
6685 * Common worker for MMX instructions of the form:
6686 * psrlw mm, imm8
6687 * psraw mm, imm8
6688 * psllw mm, imm8
6689 * psrld mm, imm8
6690 * psrad mm, imm8
6691 * pslld mm, imm8
6692 * psrlq mm, imm8
6693 * psllq mm, imm8
6694 *
6695 */
6696FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6697{
6698 if (IEM_IS_MODRM_REG_MODE(bRm))
6699 {
6700 /*
6701 * Register, immediate.
6702 */
6703 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6705
6706 IEM_MC_BEGIN(2, 0);
6707 IEM_MC_ARG(uint64_t *, pDst, 0);
6708 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6709 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6710 IEM_MC_PREPARE_FPU_USAGE();
6711 IEM_MC_FPU_TO_MMX_MODE();
6712
6713 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6714 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6715 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6716
6717 IEM_MC_ADVANCE_RIP_AND_FINISH();
6718 IEM_MC_END();
6719 }
6720 else
6721 {
6722 /*
6723 * Register, memory not supported.
6724 */
6725 /// @todo Caller already enforced register mode?!
6726 AssertFailedReturn(VINF_SUCCESS);
6727 }
6728}
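
/* Per-element shift semantics the shift workers implement, sketched for psrlw
 * on a 64-bit MMX value (hypothetical helper): counts above 15 zero every
 * word, and the mask keeps bits from leaking across word boundaries. */
#if 0 /* example only */
static uint64_t psrlwImmExample(uint64_t uSrc, uint8_t bShift)
{
    if (bShift > 15)
        return 0;
    uint64_t const uMask = (uint64_t)(0xffffU >> bShift) * UINT64_C(0x0001000100010001);
    return (uSrc >> bShift) & uMask;
}
#endif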
6729
6730
6731/**
6732 * Common worker for SSE2 instructions of the form:
6733 * psrlw xmm, imm8
6734 * psraw xmm, imm8
6735 * psllw xmm, imm8
6736 * psrld xmm, imm8
6737 * psrad xmm, imm8
6738 * pslld xmm, imm8
6739 * psrlq xmm, imm8
6740 * psllq xmm, imm8
6741 *
6742 */
6743FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6744{
6745 if (IEM_IS_MODRM_REG_MODE(bRm))
6746 {
6747 /*
6748 * Register, immediate.
6749 */
6750 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6752
6753 IEM_MC_BEGIN(2, 0);
6754 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6755 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6756 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6757 IEM_MC_PREPARE_SSE_USAGE();
6758 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6759 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6760 IEM_MC_ADVANCE_RIP_AND_FINISH();
6761 IEM_MC_END();
6762 }
6763 else
6764 {
6765 /*
6766 * Register, memory not supported.
6767 */
6768 /// @todo Caller already enforced register mode?!
6769 AssertFailedReturn(VINF_SUCCESS);
6770 }
6771}
6772
6773
6774/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6775FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6776{
6777// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6778 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6779}
6780
6781
6782/** Opcode 0x66 0x0f 0x71 11/2. */
6783FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6784{
6785// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6786 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6787}
6788
6789
6790/** Opcode 0x0f 0x71 11/4. */
6791FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6792{
6793// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6794 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6795}
6796
6797
6798/** Opcode 0x66 0x0f 0x71 11/4. */
6799FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6800{
6801// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6802 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6803}
6804
6805
6806/** Opcode 0x0f 0x71 11/6. */
6807FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6808{
6809// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6810 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6811}
6812
6813
6814/** Opcode 0x66 0x0f 0x71 11/6. */
6815FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6816{
6817// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6818 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6819}
6820
6821
6822/**
6823 * Group 12 jump table for register variant.
6824 */
6825IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6826{
6827 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6828 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6829 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6830 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6831 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6832 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6833 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6834 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6835};
6836AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6837
6838
6839/** Opcode 0x0f 0x71. */
6840FNIEMOP_DEF(iemOp_Grp12)
6841{
6842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6843 if (IEM_IS_MODRM_REG_MODE(bRm))
6844 /* register, register */
6845 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6846 + pVCpu->iem.s.idxPrefix], bRm);
6847 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6848}
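
/* How the group jump tables above are indexed -- a sketch, not decoder code:
 * each ModR/M.reg value owns four consecutive slots, one per mandatory-prefix
 * state (none, 0x66, 0xf3, 0xf2) as tracked in pVCpu->iem.s.idxPrefix. */
#if 0 /* example only */
unsigned const idxFn = IEM_GET_MODRM_REG_8(bRm) * 4 /* /r selects the row    */
                     + pVCpu->iem.s.idxPrefix;      /* prefix selects column */
#endif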
6849
6850
6851/** Opcode 0x0f 0x72 11/2. */
6852FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6853{
6854// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6855 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6856}
6857
6858
6859/** Opcode 0x66 0x0f 0x72 11/2. */
6860FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6861{
6862// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6863 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6864}
6865
6866
6867/** Opcode 0x0f 0x72 11/4. */
6868FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6869{
6870// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6871 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6872}
6873
6874
6875/** Opcode 0x66 0x0f 0x72 11/4. */
6876FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6877{
6878// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6879 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6880}
6881
6882
6883/** Opcode 0x0f 0x72 11/6. */
6884FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6885{
6886// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6887 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6888}
6889
6890/** Opcode 0x66 0x0f 0x72 11/6. */
6891FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6892{
6893// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6894 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6895}
6896
6897
6898/**
6899 * Group 13 jump table for register variant.
6900 */
6901IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6902{
6903 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6904 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6905 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6906 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6907 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6908 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6909 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6910 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6911};
6912AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6913
6914/** Opcode 0x0f 0x72. */
6915FNIEMOP_DEF(iemOp_Grp13)
6916{
6917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6918 if (IEM_IS_MODRM_REG_MODE(bRm))
6919 /* register, register */
6920 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6921 + pVCpu->iem.s.idxPrefix], bRm);
6922 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6923}
6924
6925
6926/** Opcode 0x0f 0x73 11/2. */
6927FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6928{
6929// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6930 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6931}
6932
6933
6934/** Opcode 0x66 0x0f 0x73 11/2. */
6935FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6936{
6937// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6938 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6939}
6940
6941
6942/** Opcode 0x66 0x0f 0x73 11/3. */
6943FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6944{
6945// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6946 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6947}
6948
6949
6950/** Opcode 0x0f 0x73 11/6. */
6951FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6952{
6953// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6954 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6955}
6956
6957
6958/** Opcode 0x66 0x0f 0x73 11/6. */
6959FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6960{
6961// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6962 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6963}
6964
6965
6966/** Opcode 0x66 0x0f 0x73 11/7. */
6967FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6968{
6969// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6970 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6971}
6972
6973/**
6974 * Group 14 jump table for register variant.
6975 */
6976IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6977{
6978 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6979 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6980 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6981 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6982 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6983 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6984 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6985 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6986};
6987AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6988
6989
6990/** Opcode 0x0f 0x73. */
6991FNIEMOP_DEF(iemOp_Grp14)
6992{
6993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6994 if (IEM_IS_MODRM_REG_MODE(bRm))
6995 /* register, register */
6996 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6997 + pVCpu->iem.s.idxPrefix], bRm);
6998 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6999}
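
/* Unlike the word/dword/qword shifts, the group 14 /3 and /7 encodings shift
 * the whole register by *bytes*. A sketch of psrldq (hypothetical helper): */
#if 0 /* example only */
static void psrldqExample(PRTUINT128U puDst, uint8_t cbShift)
{
    for (unsigned i = 0; i < 16; i++)
        puDst->au8[i] = i + cbShift < 16 ? puDst->au8[i + cbShift] : 0;
}
#endif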
7000
7001
7002/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7003FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7004{
7005 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7006 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7007}
7008
7009
7010/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7011FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7012{
7013 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7014 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7015}
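
/* Element-wise compare semantics of the pcmpeq* family, sketched for the
 * 128-bit byte variant (hypothetical helper): equal elements become all ones,
 * unequal ones all zeroes; no EFLAGS are produced. */
#if 0 /* example only */
static void pcmpeqbExample(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    for (unsigned i = 0; i < 16; i++)
        puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
}
#endif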
7016
7017
7018/* Opcode 0xf3 0x0f 0x74 - invalid */
7019/* Opcode 0xf2 0x0f 0x74 - invalid */
7020
7021
7022/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7023FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7024{
7025 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7026 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7027}
7028
7029
7030/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7031FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7032{
7033 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7034 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7035}
7036
7037
7038/* Opcode 0xf3 0x0f 0x75 - invalid */
7039/* Opcode 0xf2 0x0f 0x75 - invalid */
7040
7041
7042/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7043FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7044{
7045 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7046 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7047}
7048
7049
7050/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7051FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7052{
7053 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7054 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7055}
7056
7057
7058/* Opcode 0xf3 0x0f 0x76 - invalid */
7059/* Opcode 0xf2 0x0f 0x76 - invalid */
7060
7061
7062/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7063FNIEMOP_DEF(iemOp_emms)
7064{
7065 IEMOP_MNEMONIC(emms, "emms");
7066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7067
7068 IEM_MC_BEGIN(0, 0);
7069 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7070 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7071 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7072 IEM_MC_FPU_FROM_MMX_MODE();
7073 IEM_MC_ADVANCE_RIP_AND_FINISH();
7074 IEM_MC_END();
7075}
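
/* What leaving MMX mode amounts to, as a sketch (exact handling lives in
 * IEM_MC_FPU_FROM_MMX_MODE; field names are from the FXSAVE image and the
 * abridged tag format is assumed): all x87 registers are tagged empty and the
 * stack top is reset. */
#if 0 /* example only */
pFpuCtx->FTW  = 0;                 /* abridged tags: 0 = register empty */
pFpuCtx->FSW &= ~X86_FSW_TOP_MASK; /* TOP back to 0 */
#endif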
7076
7077/* Opcode 0x66 0x0f 0x77 - invalid */
7078/* Opcode 0xf3 0x0f 0x77 - invalid */
7079/* Opcode 0xf2 0x0f 0x77 - invalid */
7080
7081/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7082#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7083FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7084{
7085 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7086 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7087 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7088 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7089
7090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7091 if (IEM_IS_MODRM_REG_MODE(bRm))
7092 {
7093 /*
7094 * Register, register.
7095 */
7096 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7097 if (enmEffOpSize == IEMMODE_64BIT)
7098 {
7099 IEM_MC_BEGIN(2, 0);
7100 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7101 IEM_MC_ARG(uint64_t, u64Enc, 1);
7102 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7103 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7104 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7105 IEM_MC_END();
7106 }
7107 else
7108 {
7109 IEM_MC_BEGIN(2, 0);
7110 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7111 IEM_MC_ARG(uint32_t, u32Enc, 1);
7112 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7113 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7114 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
7115 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7116 IEM_MC_END();
7117 }
7118 }
7119 else
7120 {
7121 /*
7122 * Memory, register.
7123 */
7124 if (enmEffOpSize == IEMMODE_64BIT)
7125 {
7126 IEM_MC_BEGIN(3, 0);
7127 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7128 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7129 IEM_MC_ARG(uint64_t, u64Enc, 2);
7130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7131 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7132 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7133 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7134 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7135 IEM_MC_END();
7136 }
7137 else
7138 {
7139 IEM_MC_BEGIN(3, 0);
7140 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7141 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7142 IEM_MC_ARG(uint32_t, u32Enc, 2);
7143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7144 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7145 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7146 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7147 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7148 IEM_MC_END();
7149 }
7150 }
7151 return VINF_SUCCESS;
7152}
7153#else
7154FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7155#endif
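/*
 * For reference: VMREAD copies the VMCS field selected by the encoding in
 * the register operand into the r/m operand, which is why the memory forms
 * above forward an iEffSeg/GCPtrVal pair to the C implementation.  VMWRITE
 * below is the mirror image.  A hypervisor-side usage sketch, assuming
 * GCC-style inline assembly; the 0x681e constant is the architectural
 * encoding of the guest RIP field.
 */
#if 0 /* illustration */
# include <stdint.h>

static uint64_t VmreadU64(uint64_t uFieldEnc)
{
    uint64_t uValue;
    __asm__ __volatile__("vmread %1, %0" : "=rm" (uValue) : "r" (uFieldEnc) : "cc");
    return uValue;
}

/* uint64_t uGuestRip = VmreadU64(0x681e); */
#endif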
7156
7157/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7158FNIEMOP_STUB(iemOp_AmdGrp17);
7159/* Opcode 0xf3 0x0f 0x78 - invalid */
7160/* Opcode 0xf2 0x0f 0x78 - invalid */
7161
7162/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7163#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7164FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7165{
7166 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7167 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7168 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7169 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7170
7171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7172 if (IEM_IS_MODRM_REG_MODE(bRm))
7173 {
7174 /*
7175 * Register, register.
7176 */
7177 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7178 if (enmEffOpSize == IEMMODE_64BIT)
7179 {
7180 IEM_MC_BEGIN(2, 0);
7181 IEM_MC_ARG(uint64_t, u64Val, 0);
7182 IEM_MC_ARG(uint64_t, u64Enc, 1);
7183 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7184 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7185 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
7186 IEM_MC_END();
7187 }
7188 else
7189 {
7190 IEM_MC_BEGIN(2, 0);
7191 IEM_MC_ARG(uint32_t, u32Val, 0);
7192 IEM_MC_ARG(uint32_t, u32Enc, 1);
7193 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7194 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7195 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
7196 IEM_MC_END();
7197 }
7198 }
7199 else
7200 {
7201 /*
7202 * Register, memory.
7203 */
7204 if (enmEffOpSize == IEMMODE_64BIT)
7205 {
7206 IEM_MC_BEGIN(3, 0);
7207 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7208 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7209 IEM_MC_ARG(uint64_t, u64Enc, 2);
7210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7211 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7212 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7213 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7214 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7215 IEM_MC_END();
7216 }
7217 else
7218 {
7219 IEM_MC_BEGIN(3, 0);
7220 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7221 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7222 IEM_MC_ARG(uint32_t, u32Enc, 2);
7223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7224 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7225 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7226 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7227 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7228 IEM_MC_END();
7229 }
7230 }
7231 return VINF_SUCCESS;
7232}
7233#else
7234FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7235#endif
7236/* Opcode 0x66 0x0f 0x79 - invalid */
7237/* Opcode 0xf3 0x0f 0x79 - invalid */
7238/* Opcode 0xf2 0x0f 0x79 - invalid */
7239
7240/* Opcode 0x0f 0x7a - invalid */
7241/* Opcode 0x66 0x0f 0x7a - invalid */
7242/* Opcode 0xf3 0x0f 0x7a - invalid */
7243/* Opcode 0xf2 0x0f 0x7a - invalid */
7244
7245/* Opcode 0x0f 0x7b - invalid */
7246/* Opcode 0x66 0x0f 0x7b - invalid */
7247/* Opcode 0xf3 0x0f 0x7b - invalid */
7248/* Opcode 0xf2 0x0f 0x7b - invalid */
7249
7250/* Opcode 0x0f 0x7c - invalid */
7251
7252
7253/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7254FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7255{
7256 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7257 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7258}
7259
7260
7261/* Opcode 0xf3 0x0f 0x7c - invalid */
7262
7263
7264/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7265FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7266{
7267 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7268 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7269}
7270
7271
7272/* Opcode 0x0f 0x7d - invalid */
7273
7274
7275/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7276FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7277{
7278 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7279 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7280}
7281
7282
7283/* Opcode 0xf3 0x0f 0x7d - invalid */
7284
7285
7286/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7287FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7288{
7289 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7290 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7291}
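/*
 * For reference: the four SSE3 horizontal ops above pair up adjacent lanes
 * rather than matching lanes, which is easy to misread from the mnemonics
 * alone.  A reference model of the double-precision pair; the
 * single-precision forms work the same way across four floats.
 * Illustrative sketch only.
 */
#if 0 /* illustration */
static void haddpd_ref(double aDst[2], double const aSrc[2])
{
    double const r0 = aDst[0] + aDst[1];    /* adjacent lanes of the destination */
    double const r1 = aSrc[0] + aSrc[1];    /* adjacent lanes of the source */
    aDst[0] = r0;
    aDst[1] = r1;
}

static void hsubpd_ref(double aDst[2], double const aSrc[2])
{
    double const r0 = aDst[0] - aDst[1];
    double const r1 = aSrc[0] - aSrc[1];
    aDst[0] = r0;
    aDst[1] = r1;
}
#endif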
7292
7293
7294/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7296{
7297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7298 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7299 {
7300 /**
7301 * @opcode 0x7e
7302 * @opcodesub rex.w=1
7303 * @oppfx none
7304 * @opcpuid mmx
7305 * @opgroup og_mmx_datamove
7306 * @opxcpttype 5
7307 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7308 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7309 */
7310 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7311 if (IEM_IS_MODRM_REG_MODE(bRm))
7312 {
7313 /* greg64, MMX */
7314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7315 IEM_MC_BEGIN(0, 1);
7316 IEM_MC_LOCAL(uint64_t, u64Tmp);
7317
7318 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7319 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7320 IEM_MC_FPU_TO_MMX_MODE();
7321
7322 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7323 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7324
7325 IEM_MC_ADVANCE_RIP_AND_FINISH();
7326 IEM_MC_END();
7327 }
7328 else
7329 {
7330 /* [mem64], MMX */
7331 IEM_MC_BEGIN(0, 2);
7332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7333 IEM_MC_LOCAL(uint64_t, u64Tmp);
7334
7335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7338 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7339 IEM_MC_FPU_TO_MMX_MODE();
7340
7341 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7342 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7343
7344 IEM_MC_ADVANCE_RIP_AND_FINISH();
7345 IEM_MC_END();
7346 }
7347 }
7348 else
7349 {
7350 /**
7351 * @opdone
7352 * @opcode 0x7e
7353 * @opcodesub rex.w=0
7354 * @oppfx none
7355 * @opcpuid mmx
7356 * @opgroup og_mmx_datamove
7357 * @opxcpttype 5
7358 * @opfunction iemOp_movd_q_Ey_Pd
7359 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7360 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7361 */
7362 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7363 if (IEM_IS_MODRM_REG_MODE(bRm))
7364 {
7365 /* greg32, MMX */
7366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7367 IEM_MC_BEGIN(0, 1);
7368 IEM_MC_LOCAL(uint32_t, u32Tmp);
7369
7370 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7371 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7372 IEM_MC_FPU_TO_MMX_MODE();
7373
7374 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7375 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7376
7377 IEM_MC_ADVANCE_RIP_AND_FINISH();
7378 IEM_MC_END();
7379 }
7380 else
7381 {
7382 /* [mem32], MMX */
7383 IEM_MC_BEGIN(0, 2);
7384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7385 IEM_MC_LOCAL(uint32_t, u32Tmp);
7386
7387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7389 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7390 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7391 IEM_MC_FPU_TO_MMX_MODE();
7392
7393 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7394 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7395
7396 IEM_MC_ADVANCE_RIP_AND_FINISH();
7397 IEM_MC_END();
7398 }
7399 }
7400}
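/*
 * For reference: the function above decodes a single opcode byte into two
 * instructions.  With REX.W it is a 64-bit movq store, without it a 32-bit
 * movd store of the low half of the MMX register.  Compact reference
 * model, illustrative only:
 */
#if 0 /* illustration */
# include <stdint.h>

static void MovdQEyPdRef(void *pvDst, uint64_t uMmxReg, int fRexW)
{
    if (fRexW)
        *(uint64_t *)pvDst = uMmxReg;               /* movq r/m64, mm */
    else
        *(uint32_t *)pvDst = (uint32_t)uMmxReg;     /* movd r/m32, mm */
}
#endif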
7401
7402
7403FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7404{
7405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7406 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7407 {
7408 /**
7409 * @opcode 0x7e
7410 * @opcodesub rex.w=1
7411 * @oppfx 0x66
7412 * @opcpuid sse2
7413 * @opgroup og_sse2_simdint_datamove
7414 * @opxcpttype 5
7415 * @optest 64-bit / op1=1 op2=2 -> op1=2
7416 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7417 */
7418 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7419 if (IEM_IS_MODRM_REG_MODE(bRm))
7420 {
7421 /* greg64, XMM */
7422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7423 IEM_MC_BEGIN(0, 1);
7424 IEM_MC_LOCAL(uint64_t, u64Tmp);
7425
7426 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7428
7429 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7430 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7431
7432 IEM_MC_ADVANCE_RIP_AND_FINISH();
7433 IEM_MC_END();
7434 }
7435 else
7436 {
7437 /* [mem64], XMM */
7438 IEM_MC_BEGIN(0, 2);
7439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7440 IEM_MC_LOCAL(uint64_t, u64Tmp);
7441
7442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7446
7447 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7448 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7449
7450 IEM_MC_ADVANCE_RIP_AND_FINISH();
7451 IEM_MC_END();
7452 }
7453 }
7454 else
7455 {
7456 /**
7457 * @opdone
7458 * @opcode 0x7e
7459 * @opcodesub rex.w=0
7460 * @oppfx 0x66
7461 * @opcpuid sse2
7462 * @opgroup og_sse2_simdint_datamove
7463 * @opxcpttype 5
7464 * @opfunction iemOp_movd_q_Ey_Vy
7465 * @optest op1=1 op2=2 -> op1=2
7466 * @optest op1=0 op2=-42 -> op1=-42
7467 */
7468 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7469 if (IEM_IS_MODRM_REG_MODE(bRm))
7470 {
7471 /* greg32, XMM */
7472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7473 IEM_MC_BEGIN(0, 1);
7474 IEM_MC_LOCAL(uint32_t, u32Tmp);
7475
7476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7478
7479 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7481
7482 IEM_MC_ADVANCE_RIP_AND_FINISH();
7483 IEM_MC_END();
7484 }
7485 else
7486 {
7487 /* [mem32], XMM */
7488 IEM_MC_BEGIN(0, 2);
7489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7490 IEM_MC_LOCAL(uint32_t, u32Tmp);
7491
7492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7494 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7496
7497 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7498 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7499
7500 IEM_MC_ADVANCE_RIP_AND_FINISH();
7501 IEM_MC_END();
7502 }
7503 }
7504}
7505
7506/**
7507 * @opcode 0x7e
7508 * @oppfx 0xf3
7509 * @opcpuid sse2
7510 * @opgroup og_sse2_pcksclr_datamove
7511 * @opxcpttype none
7512 * @optest op1=1 op2=2 -> op1=2
7513 * @optest op1=0 op2=-42 -> op1=-42
7514 */
7515FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7516{
7517 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7519 if (IEM_IS_MODRM_REG_MODE(bRm))
7520 {
7521 /*
7522 * XMM128, XMM64.
7523 */
7524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7525 IEM_MC_BEGIN(0, 2);
7526 IEM_MC_LOCAL(uint64_t, uSrc);
7527
7528 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7529 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7530
7531 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7532 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7533
7534 IEM_MC_ADVANCE_RIP_AND_FINISH();
7535 IEM_MC_END();
7536 }
7537 else
7538 {
7539 /*
7540 * XMM128, [mem64].
7541 */
7542 IEM_MC_BEGIN(0, 2);
7543 IEM_MC_LOCAL(uint64_t, uSrc);
7544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7545
7546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7548 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7549 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7550
7551 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7552 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7553
7554 IEM_MC_ADVANCE_RIP_AND_FINISH();
7555 IEM_MC_END();
7556 }
7557}
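/*
 * For reference: unlike the MMX form, the f3-prefixed movq above always
 * zero-extends into the full 128-bit destination, hence the
 * IEM_MC_STORE_XREG_U64_ZX_U128 micro-op.  Reference model, illustrative
 * only:
 */
#if 0 /* illustration */
# include <stdint.h>

static void MovqVqWqRef(uint64_t auDst[2], uint64_t uSrcLo)
{
    auDst[0] = uSrcLo;  /* low quadword from the source register or memory */
    auDst[1] = 0;       /* high quadword is always cleared */
}
#endif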
7558
7559/* Opcode 0xf2 0x0f 0x7e - invalid */
7560
7561
7562/** Opcode 0x0f 0x7f - movq Qq, Pq */
7563FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7564{
7565 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7567 if (IEM_IS_MODRM_REG_MODE(bRm))
7568 {
7569 /*
7570 * MMX, MMX.
7571 */
7572 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7573 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7575 IEM_MC_BEGIN(0, 1);
7576 IEM_MC_LOCAL(uint64_t, u64Tmp);
7577 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7578 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7579 IEM_MC_FPU_TO_MMX_MODE();
7580
7581 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7582 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7583
7584 IEM_MC_ADVANCE_RIP_AND_FINISH();
7585 IEM_MC_END();
7586 }
7587 else
7588 {
7589 /*
7590 * [mem64], MMX.
7591 */
7592 IEM_MC_BEGIN(0, 2);
7593 IEM_MC_LOCAL(uint64_t, u64Tmp);
7594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7595
7596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7598 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7599 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7600 IEM_MC_FPU_TO_MMX_MODE();
7601
7602 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7603 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7604
7605 IEM_MC_ADVANCE_RIP_AND_FINISH();
7606 IEM_MC_END();
7607 }
7608}
7609
7610/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7611FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7612{
7613 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7615 if (IEM_IS_MODRM_REG_MODE(bRm))
7616 {
7617 /*
7618 * XMM, XMM.
7619 */
7620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7621 IEM_MC_BEGIN(0, 0);
7622 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7623 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7624 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7625 IEM_GET_MODRM_REG(pVCpu, bRm));
7626 IEM_MC_ADVANCE_RIP_AND_FINISH();
7627 IEM_MC_END();
7628 }
7629 else
7630 {
7631 /*
7632 * [mem128], XMM.
7633 */
7634 IEM_MC_BEGIN(0, 2);
7635 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7637
7638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7641 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7642
7643 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7644 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7645
7646 IEM_MC_ADVANCE_RIP_AND_FINISH();
7647 IEM_MC_END();
7648 }
7649}
7650
7651/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7652FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7653{
7654 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7656 if (IEM_IS_MODRM_REG_MODE(bRm))
7657 {
7658 /*
7659 * XMM, XMM.
7660 */
7661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7662 IEM_MC_BEGIN(0, 0);
7663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7665 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7666 IEM_GET_MODRM_REG(pVCpu, bRm));
7667 IEM_MC_ADVANCE_RIP_AND_FINISH();
7668 IEM_MC_END();
7669 }
7670 else
7671 {
7672 /*
7673 * [mem128], XMM.
7674 */
7675 IEM_MC_BEGIN(0, 2);
7676 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7678
7679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7681 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7683
7684 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7685 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7686
7687 IEM_MC_ADVANCE_RIP_AND_FINISH();
7688 IEM_MC_END();
7689 }
7690}
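/*
 * For reference: the only semantic difference between the two stores above
 * is the alignment contract.  movdqa (IEM_MC_STORE_MEM_U128_ALIGN_SSE)
 * faults on a misaligned effective address, while movdqu
 * (IEM_MC_STORE_MEM_U128) accepts any address.  Sketch of the check the
 * aligned form implies, illustrative only:
 */
#if 0 /* illustration */
# include <stdint.h>

static int MovdqaAddrOk(uint64_t GCPtr)
{
    return (GCPtr & 15) == 0;   /* 16-byte aligned, otherwise #GP(0) */
}
#endif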
7691
7692/* Opcode 0xf2 0x0f 0x7f - invalid */
7693
7694
7695
7696/** Opcode 0x0f 0x80. */
7697FNIEMOP_DEF(iemOp_jo_Jv)
7698{
7699 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7700 IEMOP_HLP_MIN_386();
7701 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7702 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7703 {
7704 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7706
7707 IEM_MC_BEGIN(0, 0);
7708 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7709 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7710 } IEM_MC_ELSE() {
7711 IEM_MC_ADVANCE_RIP_AND_FINISH();
7712 } IEM_MC_ENDIF();
7713 IEM_MC_END();
7714 }
7715 else
7716 {
7717 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7719
7720 IEM_MC_BEGIN(0, 0);
7721 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7722 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7723 } IEM_MC_ELSE() {
7724 IEM_MC_ADVANCE_RIP_AND_FINISH();
7725 } IEM_MC_ENDIF();
7726 IEM_MC_END();
7727 }
7728}
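/*
 * For reference: the fifteen Jcc decoders that follow only differ in the
 * EFLAGS predicate and in whether the taken/not-taken bodies are swapped.
 * The architectural conditions are:
 *
 *      jo   OF=1            jno  OF=0
 *      jb   CF=1            jae  CF=0
 *      je   ZF=1            jne  ZF=0
 *      jbe  CF=1 or ZF=1    ja   CF=0 and ZF=0
 *      js   SF=1            jns  SF=0
 *      jp   PF=1            jnp  PF=0
 *      jl   SF!=OF          jge  SF=OF
 *      jle  ZF=1 or SF!=OF  jg   ZF=0 and SF=OF
 *
 * A scalar sketch of the common shape, illustrative only:
 */
#if 0 /* illustration */
# include <stdint.h>

static uint64_t JccRef(uint64_t uRipNext, int64_t offImm, int fCondMet)
{
    /* Jcc adds the sign-extended immediate to the RIP of the next
       instruction when the condition holds, else it falls through. */
    return fCondMet ? uRipNext + (uint64_t)offImm : uRipNext;
}
#endif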
7729
7730
7731/** Opcode 0x0f 0x81. */
7732FNIEMOP_DEF(iemOp_jno_Jv)
7733{
7734 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7735 IEMOP_HLP_MIN_386();
7736 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7737 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7738 {
7739 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7741
7742 IEM_MC_BEGIN(0, 0);
7743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7744 IEM_MC_ADVANCE_RIP_AND_FINISH();
7745 } IEM_MC_ELSE() {
7746 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7747 } IEM_MC_ENDIF();
7748 IEM_MC_END();
7749 }
7750 else
7751 {
7752 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7754
7755 IEM_MC_BEGIN(0, 0);
7756 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7757 IEM_MC_ADVANCE_RIP_AND_FINISH();
7758 } IEM_MC_ELSE() {
7759 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7760 } IEM_MC_ENDIF();
7761 IEM_MC_END();
7762 }
7763}
7764
7765
7766/** Opcode 0x0f 0x82. */
7767FNIEMOP_DEF(iemOp_jc_Jv)
7768{
7769 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7770 IEMOP_HLP_MIN_386();
7771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7773 {
7774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7776
7777 IEM_MC_BEGIN(0, 0);
7778 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7779 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7780 } IEM_MC_ELSE() {
7781 IEM_MC_ADVANCE_RIP_AND_FINISH();
7782 } IEM_MC_ENDIF();
7783 IEM_MC_END();
7784 }
7785 else
7786 {
7787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7789
7790 IEM_MC_BEGIN(0, 0);
7791 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7792 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7793 } IEM_MC_ELSE() {
7794 IEM_MC_ADVANCE_RIP_AND_FINISH();
7795 } IEM_MC_ENDIF();
7796 IEM_MC_END();
7797 }
7798}
7799
7800
7801/** Opcode 0x0f 0x83. */
7802FNIEMOP_DEF(iemOp_jnc_Jv)
7803{
7804 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7805 IEMOP_HLP_MIN_386();
7806 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7807 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7808 {
7809 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7811
7812 IEM_MC_BEGIN(0, 0);
7813 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7814 IEM_MC_ADVANCE_RIP_AND_FINISH();
7815 } IEM_MC_ELSE() {
7816 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7817 } IEM_MC_ENDIF();
7818 IEM_MC_END();
7819 }
7820 else
7821 {
7822 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7824
7825 IEM_MC_BEGIN(0, 0);
7826 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7827 IEM_MC_ADVANCE_RIP_AND_FINISH();
7828 } IEM_MC_ELSE() {
7829 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7830 } IEM_MC_ENDIF();
7831 IEM_MC_END();
7832 }
7833}
7834
7835
7836/** Opcode 0x0f 0x84. */
7837FNIEMOP_DEF(iemOp_je_Jv)
7838{
7839 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7840 IEMOP_HLP_MIN_386();
7841 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7842 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7843 {
7844 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846
7847 IEM_MC_BEGIN(0, 0);
7848 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7849 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7850 } IEM_MC_ELSE() {
7851 IEM_MC_ADVANCE_RIP_AND_FINISH();
7852 } IEM_MC_ENDIF();
7853 IEM_MC_END();
7854 }
7855 else
7856 {
7857 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7859
7860 IEM_MC_BEGIN(0, 0);
7861 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7862 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7863 } IEM_MC_ELSE() {
7864 IEM_MC_ADVANCE_RIP_AND_FINISH();
7865 } IEM_MC_ENDIF();
7866 IEM_MC_END();
7867 }
7868}
7869
7870
7871/** Opcode 0x0f 0x85. */
7872FNIEMOP_DEF(iemOp_jne_Jv)
7873{
7874 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7875 IEMOP_HLP_MIN_386();
7876 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7877 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7878 {
7879 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7881
7882 IEM_MC_BEGIN(0, 0);
7883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7884 IEM_MC_ADVANCE_RIP_AND_FINISH();
7885 } IEM_MC_ELSE() {
7886 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7887 } IEM_MC_ENDIF();
7888 IEM_MC_END();
7889 }
7890 else
7891 {
7892 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7894
7895 IEM_MC_BEGIN(0, 0);
7896 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7897 IEM_MC_ADVANCE_RIP_AND_FINISH();
7898 } IEM_MC_ELSE() {
7899 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7900 } IEM_MC_ENDIF();
7901 IEM_MC_END();
7902 }
7903}
7904
7905
7906/** Opcode 0x0f 0x86. */
7907FNIEMOP_DEF(iemOp_jbe_Jv)
7908{
7909 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7910 IEMOP_HLP_MIN_386();
7911 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7912 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7913 {
7914 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7916
7917 IEM_MC_BEGIN(0, 0);
7918 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7919 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7920 } IEM_MC_ELSE() {
7921 IEM_MC_ADVANCE_RIP_AND_FINISH();
7922 } IEM_MC_ENDIF();
7923 IEM_MC_END();
7924 }
7925 else
7926 {
7927 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7929
7930 IEM_MC_BEGIN(0, 0);
7931 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7932 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7933 } IEM_MC_ELSE() {
7934 IEM_MC_ADVANCE_RIP_AND_FINISH();
7935 } IEM_MC_ENDIF();
7936 IEM_MC_END();
7937 }
7938}
7939
7940
7941/** Opcode 0x0f 0x87. */
7942FNIEMOP_DEF(iemOp_jnbe_Jv)
7943{
7944 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7945 IEMOP_HLP_MIN_386();
7946 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7947 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7948 {
7949 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951
7952 IEM_MC_BEGIN(0, 0);
7953 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7954 IEM_MC_ADVANCE_RIP_AND_FINISH();
7955 } IEM_MC_ELSE() {
7956 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7957 } IEM_MC_ENDIF();
7958 IEM_MC_END();
7959 }
7960 else
7961 {
7962 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7964
7965 IEM_MC_BEGIN(0, 0);
7966 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7967 IEM_MC_ADVANCE_RIP_AND_FINISH();
7968 } IEM_MC_ELSE() {
7969 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7970 } IEM_MC_ENDIF();
7971 IEM_MC_END();
7972 }
7973}
7974
7975
7976/** Opcode 0x0f 0x88. */
7977FNIEMOP_DEF(iemOp_js_Jv)
7978{
7979 IEMOP_MNEMONIC(js_Jv, "js Jv");
7980 IEMOP_HLP_MIN_386();
7981 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7982 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7983 {
7984 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986
7987 IEM_MC_BEGIN(0, 0);
7988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7989 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7990 } IEM_MC_ELSE() {
7991 IEM_MC_ADVANCE_RIP_AND_FINISH();
7992 } IEM_MC_ENDIF();
7993 IEM_MC_END();
7994 }
7995 else
7996 {
7997 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7999
8000 IEM_MC_BEGIN(0, 0);
8001 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8002 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8003 } IEM_MC_ELSE() {
8004 IEM_MC_ADVANCE_RIP_AND_FINISH();
8005 } IEM_MC_ENDIF();
8006 IEM_MC_END();
8007 }
8008}
8009
8010
8011/** Opcode 0x0f 0x89. */
8012FNIEMOP_DEF(iemOp_jns_Jv)
8013{
8014 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8015 IEMOP_HLP_MIN_386();
8016 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8017 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8018 {
8019 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8021
8022 IEM_MC_BEGIN(0, 0);
8023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8024 IEM_MC_ADVANCE_RIP_AND_FINISH();
8025 } IEM_MC_ELSE() {
8026 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8027 } IEM_MC_ENDIF();
8028 IEM_MC_END();
8029 }
8030 else
8031 {
8032 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8034
8035 IEM_MC_BEGIN(0, 0);
8036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8037 IEM_MC_ADVANCE_RIP_AND_FINISH();
8038 } IEM_MC_ELSE() {
8039 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8040 } IEM_MC_ENDIF();
8041 IEM_MC_END();
8042 }
8043}
8044
8045
8046/** Opcode 0x0f 0x8a. */
8047FNIEMOP_DEF(iemOp_jp_Jv)
8048{
8049 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8050 IEMOP_HLP_MIN_386();
8051 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8052 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8053 {
8054 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8056
8057 IEM_MC_BEGIN(0, 0);
8058 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8059 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8060 } IEM_MC_ELSE() {
8061 IEM_MC_ADVANCE_RIP_AND_FINISH();
8062 } IEM_MC_ENDIF();
8063 IEM_MC_END();
8064 }
8065 else
8066 {
8067 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8069
8070 IEM_MC_BEGIN(0, 0);
8071 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8072 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8073 } IEM_MC_ELSE() {
8074 IEM_MC_ADVANCE_RIP_AND_FINISH();
8075 } IEM_MC_ENDIF();
8076 IEM_MC_END();
8077 }
8078}
8079
8080
8081/** Opcode 0x0f 0x8b. */
8082FNIEMOP_DEF(iemOp_jnp_Jv)
8083{
8084 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8085 IEMOP_HLP_MIN_386();
8086 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8087 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8088 {
8089 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8091
8092 IEM_MC_BEGIN(0, 0);
8093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8094 IEM_MC_ADVANCE_RIP_AND_FINISH();
8095 } IEM_MC_ELSE() {
8096 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8097 } IEM_MC_ENDIF();
8098 IEM_MC_END();
8099 }
8100 else
8101 {
8102 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8104
8105 IEM_MC_BEGIN(0, 0);
8106 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8107 IEM_MC_ADVANCE_RIP_AND_FINISH();
8108 } IEM_MC_ELSE() {
8109 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8110 } IEM_MC_ENDIF();
8111 IEM_MC_END();
8112 }
8113}
8114
8115
8116/** Opcode 0x0f 0x8c. */
8117FNIEMOP_DEF(iemOp_jl_Jv)
8118{
8119 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8120 IEMOP_HLP_MIN_386();
8121 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8122 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8123 {
8124 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8126
8127 IEM_MC_BEGIN(0, 0);
8128 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8129 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8130 } IEM_MC_ELSE() {
8131 IEM_MC_ADVANCE_RIP_AND_FINISH();
8132 } IEM_MC_ENDIF();
8133 IEM_MC_END();
8134 }
8135 else
8136 {
8137 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8139
8140 IEM_MC_BEGIN(0, 0);
8141 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8142 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8143 } IEM_MC_ELSE() {
8144 IEM_MC_ADVANCE_RIP_AND_FINISH();
8145 } IEM_MC_ENDIF();
8146 IEM_MC_END();
8147 }
8148}
8149
8150
8151/** Opcode 0x0f 0x8d. */
8152FNIEMOP_DEF(iemOp_jnl_Jv)
8153{
8154 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8155 IEMOP_HLP_MIN_386();
8156 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8157 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8158 {
8159 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8161
8162 IEM_MC_BEGIN(0, 0);
8163 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8164 IEM_MC_ADVANCE_RIP_AND_FINISH();
8165 } IEM_MC_ELSE() {
8166 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8167 } IEM_MC_ENDIF();
8168 IEM_MC_END();
8169 }
8170 else
8171 {
8172 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8174
8175 IEM_MC_BEGIN(0, 0);
8176 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8177 IEM_MC_ADVANCE_RIP_AND_FINISH();
8178 } IEM_MC_ELSE() {
8179 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8180 } IEM_MC_ENDIF();
8181 IEM_MC_END();
8182 }
8183}
8184
8185
8186/** Opcode 0x0f 0x8e. */
8187FNIEMOP_DEF(iemOp_jle_Jv)
8188{
8189 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8190 IEMOP_HLP_MIN_386();
8191 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8192 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8193 {
8194 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8196
8197 IEM_MC_BEGIN(0, 0);
8198 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8199 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8200 } IEM_MC_ELSE() {
8201 IEM_MC_ADVANCE_RIP_AND_FINISH();
8202 } IEM_MC_ENDIF();
8203 IEM_MC_END();
8204 }
8205 else
8206 {
8207 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8209
8210 IEM_MC_BEGIN(0, 0);
8211 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8212 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8213 } IEM_MC_ELSE() {
8214 IEM_MC_ADVANCE_RIP_AND_FINISH();
8215 } IEM_MC_ENDIF();
8216 IEM_MC_END();
8217 }
8218}
8219
8220
8221/** Opcode 0x0f 0x8f. */
8222FNIEMOP_DEF(iemOp_jnle_Jv)
8223{
8224 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8225 IEMOP_HLP_MIN_386();
8226 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8227 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8228 {
8229 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8231
8232 IEM_MC_BEGIN(0, 0);
8233 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8234 IEM_MC_ADVANCE_RIP_AND_FINISH();
8235 } IEM_MC_ELSE() {
8236 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8237 } IEM_MC_ENDIF();
8238 IEM_MC_END();
8239 }
8240 else
8241 {
8242 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8244
8245 IEM_MC_BEGIN(0, 0);
8246 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8247 IEM_MC_ADVANCE_RIP_AND_FINISH();
8248 } IEM_MC_ELSE() {
8249 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8250 } IEM_MC_ENDIF();
8251 IEM_MC_END();
8252 }
8253}
8254
8255
8256/** Opcode 0x0f 0x90. */
8257FNIEMOP_DEF(iemOp_seto_Eb)
8258{
8259 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8260 IEMOP_HLP_MIN_386();
8261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8262
8263 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8264 * any way. AMD says it's "unused", whatever that means. We're
8265 * ignoring it for now. */
8266 if (IEM_IS_MODRM_REG_MODE(bRm))
8267 {
8268 /* register target */
8269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8270 IEM_MC_BEGIN(0, 0);
8271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8272 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8273 } IEM_MC_ELSE() {
8274 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8275 } IEM_MC_ENDIF();
8276 IEM_MC_ADVANCE_RIP_AND_FINISH();
8277 IEM_MC_END();
8278 }
8279 else
8280 {
8281 /* memory target */
8282 IEM_MC_BEGIN(0, 1);
8283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8287 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8288 } IEM_MC_ELSE() {
8289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8290 } IEM_MC_ENDIF();
8291 IEM_MC_ADVANCE_RIP_AND_FINISH();
8292 IEM_MC_END();
8293 }
8294}
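/*
 * For reference: the setcc decoders that follow reuse the condition table
 * given for the Jcc family above.  The only wrinkle is that the whole
 * destination byte is written (0 or 1) and, as the @todo notes say, the
 * ModRM 'reg' field selects nothing.  Reference model, illustrative only:
 */
#if 0 /* illustration */
# include <stdint.h>

static void SetccRef(uint8_t *pbDst, int fCondMet)
{
    *pbDst = fCondMet ? 1 : 0;  /* full byte store, never a partial update */
}
#endif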
8295
8296
8297/** Opcode 0x0f 0x91. */
8298FNIEMOP_DEF(iemOp_setno_Eb)
8299{
8300 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8301 IEMOP_HLP_MIN_386();
8302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8303
8304 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8305 * any way. AMD says it's "unused", whatever that means. We're
8306 * ignoring it for now. */
8307 if (IEM_IS_MODRM_REG_MODE(bRm))
8308 {
8309 /* register target */
8310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8311 IEM_MC_BEGIN(0, 0);
8312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8313 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8314 } IEM_MC_ELSE() {
8315 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8316 } IEM_MC_ENDIF();
8317 IEM_MC_ADVANCE_RIP_AND_FINISH();
8318 IEM_MC_END();
8319 }
8320 else
8321 {
8322 /* memory target */
8323 IEM_MC_BEGIN(0, 1);
8324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8327 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8328 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8329 } IEM_MC_ELSE() {
8330 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8331 } IEM_MC_ENDIF();
8332 IEM_MC_ADVANCE_RIP_AND_FINISH();
8333 IEM_MC_END();
8334 }
8335}
8336
8337
8338/** Opcode 0x0f 0x92. */
8339FNIEMOP_DEF(iemOp_setc_Eb)
8340{
8341 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8342 IEMOP_HLP_MIN_386();
8343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8344
8345 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8346 * any way. AMD says it's "unused", whatever that means. We're
8347 * ignoring it for now. */
8348 if (IEM_IS_MODRM_REG_MODE(bRm))
8349 {
8350 /* register target */
8351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8352 IEM_MC_BEGIN(0, 0);
8353 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8354 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8355 } IEM_MC_ELSE() {
8356 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8357 } IEM_MC_ENDIF();
8358 IEM_MC_ADVANCE_RIP_AND_FINISH();
8359 IEM_MC_END();
8360 }
8361 else
8362 {
8363 /* memory target */
8364 IEM_MC_BEGIN(0, 1);
8365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8368 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8369 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8370 } IEM_MC_ELSE() {
8371 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8372 } IEM_MC_ENDIF();
8373 IEM_MC_ADVANCE_RIP_AND_FINISH();
8374 IEM_MC_END();
8375 }
8376}
8377
8378
8379/** Opcode 0x0f 0x93. */
8380FNIEMOP_DEF(iemOp_setnc_Eb)
8381{
8382 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8383 IEMOP_HLP_MIN_386();
8384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8385
8386 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8387 * any way. AMD says it's "unused", whatever that means. We're
8388 * ignoring it for now. */
8389 if (IEM_IS_MODRM_REG_MODE(bRm))
8390 {
8391 /* register target */
8392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8393 IEM_MC_BEGIN(0, 0);
8394 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8395 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8396 } IEM_MC_ELSE() {
8397 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8398 } IEM_MC_ENDIF();
8399 IEM_MC_ADVANCE_RIP_AND_FINISH();
8400 IEM_MC_END();
8401 }
8402 else
8403 {
8404 /* memory target */
8405 IEM_MC_BEGIN(0, 1);
8406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8409 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8410 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8411 } IEM_MC_ELSE() {
8412 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8413 } IEM_MC_ENDIF();
8414 IEM_MC_ADVANCE_RIP_AND_FINISH();
8415 IEM_MC_END();
8416 }
8417}
8418
8419
8420/** Opcode 0x0f 0x94. */
8421FNIEMOP_DEF(iemOp_sete_Eb)
8422{
8423 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8424 IEMOP_HLP_MIN_386();
8425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8426
8427 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8428 * any way. AMD says it's "unused", whatever that means. We're
8429 * ignoring it for now. */
8430 if (IEM_IS_MODRM_REG_MODE(bRm))
8431 {
8432 /* register target */
8433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8434 IEM_MC_BEGIN(0, 0);
8435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8436 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8437 } IEM_MC_ELSE() {
8438 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8439 } IEM_MC_ENDIF();
8440 IEM_MC_ADVANCE_RIP_AND_FINISH();
8441 IEM_MC_END();
8442 }
8443 else
8444 {
8445 /* memory target */
8446 IEM_MC_BEGIN(0, 1);
8447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8450 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8451 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8452 } IEM_MC_ELSE() {
8453 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8454 } IEM_MC_ENDIF();
8455 IEM_MC_ADVANCE_RIP_AND_FINISH();
8456 IEM_MC_END();
8457 }
8458}
8459
8460
8461/** Opcode 0x0f 0x95. */
8462FNIEMOP_DEF(iemOp_setne_Eb)
8463{
8464 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8465 IEMOP_HLP_MIN_386();
8466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8467
8468 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8469 * any way. AMD says it's "unused", whatever that means. We're
8470 * ignoring it for now. */
8471 if (IEM_IS_MODRM_REG_MODE(bRm))
8472 {
8473 /* register target */
8474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8475 IEM_MC_BEGIN(0, 0);
8476 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8477 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8478 } IEM_MC_ELSE() {
8479 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8480 } IEM_MC_ENDIF();
8481 IEM_MC_ADVANCE_RIP_AND_FINISH();
8482 IEM_MC_END();
8483 }
8484 else
8485 {
8486 /* memory target */
8487 IEM_MC_BEGIN(0, 1);
8488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8491 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8492 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8493 } IEM_MC_ELSE() {
8494 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8495 } IEM_MC_ENDIF();
8496 IEM_MC_ADVANCE_RIP_AND_FINISH();
8497 IEM_MC_END();
8498 }
8499}
8500
8501
8502/** Opcode 0x0f 0x96. */
8503FNIEMOP_DEF(iemOp_setbe_Eb)
8504{
8505 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8506 IEMOP_HLP_MIN_386();
8507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8508
8509 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8510 * any way. AMD says it's "unused", whatever that means. We're
8511 * ignoring it for now. */
8512 if (IEM_IS_MODRM_REG_MODE(bRm))
8513 {
8514 /* register target */
8515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8516 IEM_MC_BEGIN(0, 0);
8517 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8518 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8519 } IEM_MC_ELSE() {
8520 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8521 } IEM_MC_ENDIF();
8522 IEM_MC_ADVANCE_RIP_AND_FINISH();
8523 IEM_MC_END();
8524 }
8525 else
8526 {
8527 /* memory target */
8528 IEM_MC_BEGIN(0, 1);
8529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8532 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8533 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8534 } IEM_MC_ELSE() {
8535 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8536 } IEM_MC_ENDIF();
8537 IEM_MC_ADVANCE_RIP_AND_FINISH();
8538 IEM_MC_END();
8539 }
8540}
8541
8542
8543/** Opcode 0x0f 0x97. */
8544FNIEMOP_DEF(iemOp_setnbe_Eb)
8545{
8546 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8547 IEMOP_HLP_MIN_386();
8548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8549
8550 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8551 * any way. AMD says it's "unused", whatever that means. We're
8552 * ignoring it for now. */
8553 if (IEM_IS_MODRM_REG_MODE(bRm))
8554 {
8555 /* register target */
8556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8557 IEM_MC_BEGIN(0, 0);
8558 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8559 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8560 } IEM_MC_ELSE() {
8561 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8562 } IEM_MC_ENDIF();
8563 IEM_MC_ADVANCE_RIP_AND_FINISH();
8564 IEM_MC_END();
8565 }
8566 else
8567 {
8568 /* memory target */
8569 IEM_MC_BEGIN(0, 1);
8570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8573 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8574 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8575 } IEM_MC_ELSE() {
8576 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8577 } IEM_MC_ENDIF();
8578 IEM_MC_ADVANCE_RIP_AND_FINISH();
8579 IEM_MC_END();
8580 }
8581}
8582
8583
8584/** Opcode 0x0f 0x98. */
8585FNIEMOP_DEF(iemOp_sets_Eb)
8586{
8587 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8588 IEMOP_HLP_MIN_386();
8589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8590
8591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8592 * any way. AMD says it's "unused", whatever that means. We're
8593 * ignoring it for now. */
8594 if (IEM_IS_MODRM_REG_MODE(bRm))
8595 {
8596 /* register target */
8597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8598 IEM_MC_BEGIN(0, 0);
8599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8600 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8601 } IEM_MC_ELSE() {
8602 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8603 } IEM_MC_ENDIF();
8604 IEM_MC_ADVANCE_RIP_AND_FINISH();
8605 IEM_MC_END();
8606 }
8607 else
8608 {
8609 /* memory target */
8610 IEM_MC_BEGIN(0, 1);
8611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8616 } IEM_MC_ELSE() {
8617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8618 } IEM_MC_ENDIF();
8619 IEM_MC_ADVANCE_RIP_AND_FINISH();
8620 IEM_MC_END();
8621 }
8622}
8623
8624
8625/** Opcode 0x0f 0x99. */
8626FNIEMOP_DEF(iemOp_setns_Eb)
8627{
8628 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8629 IEMOP_HLP_MIN_386();
8630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8631
8632 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8633 * any way. AMD says it's "unused", whatever that means. We're
8634 * ignoring it for now. */
8635 if (IEM_IS_MODRM_REG_MODE(bRm))
8636 {
8637 /* register target */
8638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8639 IEM_MC_BEGIN(0, 0);
8640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8641 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8642 } IEM_MC_ELSE() {
8643 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8644 } IEM_MC_ENDIF();
8645 IEM_MC_ADVANCE_RIP_AND_FINISH();
8646 IEM_MC_END();
8647 }
8648 else
8649 {
8650 /* memory target */
8651 IEM_MC_BEGIN(0, 1);
8652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8656 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8657 } IEM_MC_ELSE() {
8658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8659 } IEM_MC_ENDIF();
8660 IEM_MC_ADVANCE_RIP_AND_FINISH();
8661 IEM_MC_END();
8662 }
8663}
8664
8665
8666/** Opcode 0x0f 0x9a. */
8667FNIEMOP_DEF(iemOp_setp_Eb)
8668{
8669 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8670 IEMOP_HLP_MIN_386();
8671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8672
8673 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8674 * any way. AMD says it's "unused", whatever that means. We're
8675 * ignoring it for now. */
8676 if (IEM_IS_MODRM_REG_MODE(bRm))
8677 {
8678 /* register target */
8679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8680 IEM_MC_BEGIN(0, 0);
8681 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8682 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8683 } IEM_MC_ELSE() {
8684 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8685 } IEM_MC_ENDIF();
8686 IEM_MC_ADVANCE_RIP_AND_FINISH();
8687 IEM_MC_END();
8688 }
8689 else
8690 {
8691 /* memory target */
8692 IEM_MC_BEGIN(0, 1);
8693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8698 } IEM_MC_ELSE() {
8699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8700 } IEM_MC_ENDIF();
8701 IEM_MC_ADVANCE_RIP_AND_FINISH();
8702 IEM_MC_END();
8703 }
8704}
8705
8706
8707/** Opcode 0x0f 0x9b. */
8708FNIEMOP_DEF(iemOp_setnp_Eb)
8709{
8710 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8711 IEMOP_HLP_MIN_386();
8712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8713
8714 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8715 * any way. AMD says it's "unused", whatever that means. We're
8716 * ignoring it for now. */
8717 if (IEM_IS_MODRM_REG_MODE(bRm))
8718 {
8719 /* register target */
8720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8721 IEM_MC_BEGIN(0, 0);
8722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8723 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8724 } IEM_MC_ELSE() {
8725 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8726 } IEM_MC_ENDIF();
8727 IEM_MC_ADVANCE_RIP_AND_FINISH();
8728 IEM_MC_END();
8729 }
8730 else
8731 {
8732 /* memory target */
8733 IEM_MC_BEGIN(0, 1);
8734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8737 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8739 } IEM_MC_ELSE() {
8740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8741 } IEM_MC_ENDIF();
8742 IEM_MC_ADVANCE_RIP_AND_FINISH();
8743 IEM_MC_END();
8744 }
8745}
8746
8747
8748/** Opcode 0x0f 0x9c. */
8749FNIEMOP_DEF(iemOp_setl_Eb)
8750{
8751 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8752 IEMOP_HLP_MIN_386();
8753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8754
8755 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8756 * any way. AMD says it's "unused", whatever that means. We're
8757 * ignoring it for now. */
8758 if (IEM_IS_MODRM_REG_MODE(bRm))
8759 {
8760 /* register target */
8761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8762 IEM_MC_BEGIN(0, 0);
8763 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8764 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8765 } IEM_MC_ELSE() {
8766 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8767 } IEM_MC_ENDIF();
8768 IEM_MC_ADVANCE_RIP_AND_FINISH();
8769 IEM_MC_END();
8770 }
8771 else
8772 {
8773 /* memory target */
8774 IEM_MC_BEGIN(0, 1);
8775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8778 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8780 } IEM_MC_ELSE() {
8781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8782 } IEM_MC_ENDIF();
8783 IEM_MC_ADVANCE_RIP_AND_FINISH();
8784 IEM_MC_END();
8785 }
8786}
8787
8788
8789/** Opcode 0x0f 0x9d. */
8790FNIEMOP_DEF(iemOp_setnl_Eb)
8791{
8792 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8793 IEMOP_HLP_MIN_386();
8794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8795
8796 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8797 * any way. AMD says it's "unused", whatever that means. We're
8798 * ignoring it for now. */
8799 if (IEM_IS_MODRM_REG_MODE(bRm))
8800 {
8801 /* register target */
8802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8803 IEM_MC_BEGIN(0, 0);
8804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8805 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8806 } IEM_MC_ELSE() {
8807 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8808 } IEM_MC_ENDIF();
8809 IEM_MC_ADVANCE_RIP_AND_FINISH();
8810 IEM_MC_END();
8811 }
8812 else
8813 {
8814 /* memory target */
8815 IEM_MC_BEGIN(0, 1);
8816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8819 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8820 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8821 } IEM_MC_ELSE() {
8822 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8823 } IEM_MC_ENDIF();
8824 IEM_MC_ADVANCE_RIP_AND_FINISH();
8825 IEM_MC_END();
8826 }
8827}
8828
8829
8830/** Opcode 0x0f 0x9e. */
8831FNIEMOP_DEF(iemOp_setle_Eb)
8832{
8833 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8834 IEMOP_HLP_MIN_386();
8835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8836
8837 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8838 * any way. AMD says it's "unused", whatever that means. We're
8839 * ignoring it for now. */
8840 if (IEM_IS_MODRM_REG_MODE(bRm))
8841 {
8842 /* register target */
8843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8844 IEM_MC_BEGIN(0, 0);
8845 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8846 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8847 } IEM_MC_ELSE() {
8848 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8849 } IEM_MC_ENDIF();
8850 IEM_MC_ADVANCE_RIP_AND_FINISH();
8851 IEM_MC_END();
8852 }
8853 else
8854 {
8855 /* memory target */
8856 IEM_MC_BEGIN(0, 1);
8857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8860 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8861 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8862 } IEM_MC_ELSE() {
8863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8864 } IEM_MC_ENDIF();
8865 IEM_MC_ADVANCE_RIP_AND_FINISH();
8866 IEM_MC_END();
8867 }
8868}
8869
8870
8871/** Opcode 0x0f 0x9f. */
8872FNIEMOP_DEF(iemOp_setnle_Eb)
8873{
8874 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8875 IEMOP_HLP_MIN_386();
8876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8877
8878 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8879 * any way. AMD says it's "unused", whatever that means. We're
8880 * ignoring it for now. */
8881 if (IEM_IS_MODRM_REG_MODE(bRm))
8882 {
8883 /* register target */
8884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8885 IEM_MC_BEGIN(0, 0);
8886 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8887 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8888 } IEM_MC_ELSE() {
8889 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8890 } IEM_MC_ENDIF();
8891 IEM_MC_ADVANCE_RIP_AND_FINISH();
8892 IEM_MC_END();
8893 }
8894 else
8895 {
8896 /* memory target */
8897 IEM_MC_BEGIN(0, 1);
8898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8901 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8902 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8903 } IEM_MC_ELSE() {
8904 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8905 } IEM_MC_ENDIF();
8906 IEM_MC_ADVANCE_RIP_AND_FINISH();
8907 IEM_MC_END();
8908 }
8909}
8910
8911
8912/**
8913 * Common 'push segment-register' helper.
8914 */
8915FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8916{
8917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8918 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
8919 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8920
8921 switch (pVCpu->iem.s.enmEffOpSize)
8922 {
8923 case IEMMODE_16BIT:
8924 IEM_MC_BEGIN(0, 1);
8925 IEM_MC_LOCAL(uint16_t, u16Value);
8926 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8927 IEM_MC_PUSH_U16(u16Value);
8928 IEM_MC_ADVANCE_RIP_AND_FINISH();
8929 IEM_MC_END();
8930 break;
8931
8932 case IEMMODE_32BIT:
8933 IEM_MC_BEGIN(0, 1);
8934 IEM_MC_LOCAL(uint32_t, u32Value);
8935 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
8936 IEM_MC_PUSH_U32_SREG(u32Value);
8937 IEM_MC_ADVANCE_RIP_AND_FINISH();
8938 IEM_MC_END();
8939 break;
8940
8941 case IEMMODE_64BIT:
8942 IEM_MC_BEGIN(0, 1);
8943 IEM_MC_LOCAL(uint64_t, u64Value);
8944 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8945 IEM_MC_PUSH_U64(u64Value);
8946 IEM_MC_ADVANCE_RIP_AND_FINISH();
8947 IEM_MC_END();
8948 break;
8949
8950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8951 }
8952}
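/*
 * For reference: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather
 * than a plain 32-bit push because CPUs are permitted to write only the
 * 16-bit selector into the 32-bit stack slot, leaving the upper half of
 * the slot unmodified (see the SDM notes on PUSH with a segment register
 * operand).  Sketch of that quirk, illustrative only:
 */
#if 0 /* illustration */
# include <stdint.h>

static void PushSReg32Ref(uint8_t *pbStackSlot, uint16_t uSel)
{
    /* A 4-byte slot is allocated, but only the low word need be written. */
    *(uint16_t *)pbStackSlot = uSel;
}
#endif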
8953
8954
8955/** Opcode 0x0f 0xa0. */
8956FNIEMOP_DEF(iemOp_push_fs)
8957{
8958 IEMOP_MNEMONIC(push_fs, "push fs");
8959 IEMOP_HLP_MIN_386();
8960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8961 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8962}
8963
8964
8965/** Opcode 0x0f 0xa1. */
8966FNIEMOP_DEF(iemOp_pop_fs)
8967{
8968 IEMOP_MNEMONIC(pop_fs, "pop fs");
8969 IEMOP_HLP_MIN_386();
8970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8971 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8972}
8973
8974
8975/** Opcode 0x0f 0xa2. */
8976FNIEMOP_DEF(iemOp_cpuid)
8977{
8978 IEMOP_MNEMONIC(cpuid, "cpuid");
8979 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
8980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8981 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8982}
8983
8984
8985/**
8986 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8987 * iemOp_bts_Ev_Gv.
8988 */
8989FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
8990{
8991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8992 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8993
8994 if (IEM_IS_MODRM_REG_MODE(bRm))
8995 {
8996 /* register destination. */
8997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8998 switch (pVCpu->iem.s.enmEffOpSize)
8999 {
9000 case IEMMODE_16BIT:
9001 IEM_MC_BEGIN(3, 0);
9002 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9003 IEM_MC_ARG(uint16_t, u16Src, 1);
9004 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9005
9006 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9007 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
9008 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9009 IEM_MC_REF_EFLAGS(pEFlags);
9010 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9011
9012 IEM_MC_ADVANCE_RIP_AND_FINISH();
9013 IEM_MC_END();
9014 break;
9015
9016 case IEMMODE_32BIT:
9017 IEM_MC_BEGIN(3, 0);
9018 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9019 IEM_MC_ARG(uint32_t, u32Src, 1);
9020 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9021
9022 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9023 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
9024 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9025 IEM_MC_REF_EFLAGS(pEFlags);
9026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9027
9028 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9029 IEM_MC_ADVANCE_RIP_AND_FINISH();
9030 IEM_MC_END();
9031 break;
9032
9033 case IEMMODE_64BIT:
9034 IEM_MC_BEGIN(3, 0);
9035 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9036 IEM_MC_ARG(uint64_t, u64Src, 1);
9037 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9038
9039 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9040 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
9041 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9042 IEM_MC_REF_EFLAGS(pEFlags);
9043 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9044
9045 IEM_MC_ADVANCE_RIP_AND_FINISH();
9046 IEM_MC_END();
9047 break;
9048
9049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9050 }
9051 }
9052 else
9053 {
9054 /* memory destination. */
9055
9056 uint32_t fAccess;
9057 if (pImpl->pfnLockedU16)
9058 fAccess = IEM_ACCESS_DATA_RW;
9059 else /* BT */
9060 fAccess = IEM_ACCESS_DATA_R;
9061
9062 /** @todo test negative bit offsets! */
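 /* With a memory destination the register bit offset is a signed value
 addressing memory relative to the decoded EA: the bits above the
 operand width select the word/dword/qword (the SAR + SHL adjustment
 below), while the low bits select the bit within it. */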
9063 switch (pVCpu->iem.s.enmEffOpSize)
9064 {
9065 case IEMMODE_16BIT:
9066 IEM_MC_BEGIN(3, 2);
9067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9068 IEM_MC_ARG(uint16_t, u16Src, 1);
9069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9071 IEM_MC_LOCAL(int16_t, i16AddrAdj);
9072
9073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9074 if (pImpl->pfnLockedU16)
9075 IEMOP_HLP_DONE_DECODING();
9076 else
9077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9078 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9079 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9080 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
9081 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
9082 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
9083 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9084 IEM_MC_FETCH_EFLAGS(EFlags);
9085
9086 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9087 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9089 else
9090 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9092
9093 IEM_MC_COMMIT_EFLAGS(EFlags);
9094 IEM_MC_ADVANCE_RIP_AND_FINISH();
9095 IEM_MC_END();
9096 break;
9097
9098 case IEMMODE_32BIT:
9099 IEM_MC_BEGIN(3, 2);
9100 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9101 IEM_MC_ARG(uint32_t, u32Src, 1);
9102 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9104 IEM_MC_LOCAL(int32_t, i32AddrAdj);
9105
9106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9107 if (pImpl->pfnLockedU16)
9108 IEMOP_HLP_DONE_DECODING();
9109 else
9110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9111 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9112 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9113 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
9114 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
9115 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
9116 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9117 IEM_MC_FETCH_EFLAGS(EFlags);
9118
9119 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9120 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9121 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9122 else
9123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9124 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9125
9126 IEM_MC_COMMIT_EFLAGS(EFlags);
9127 IEM_MC_ADVANCE_RIP_AND_FINISH();
9128 IEM_MC_END();
9129 break;
9130
9131 case IEMMODE_64BIT:
9132 IEM_MC_BEGIN(3, 2);
9133 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9134 IEM_MC_ARG(uint64_t, u64Src, 1);
9135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9137 IEM_MC_LOCAL(int64_t, i64AddrAdj);
9138
9139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9140 if (pImpl->pfnLockedU16)
9141 IEMOP_HLP_DONE_DECODING();
9142 else
9143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9144 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9145 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9146 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
9147 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
9148 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
9149 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9150 IEM_MC_FETCH_EFLAGS(EFlags);
9151
9152 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9153 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9155 else
9156 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9157 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9158
9159 IEM_MC_COMMIT_EFLAGS(EFlags);
9160 IEM_MC_ADVANCE_RIP_AND_FINISH();
9161 IEM_MC_END();
9162 break;
9163
9164 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9165 }
9166 }
9167}
9168
9169
9170/** Opcode 0x0f 0xa3. */
9171FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9172{
9173 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9174 IEMOP_HLP_MIN_386();
9175 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9176}
9177
9178
9179/**
9180 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9181 */
9182FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9183{
9184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9186
9187 if (IEM_IS_MODRM_REG_MODE(bRm))
9188 {
9189 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9191
9192 switch (pVCpu->iem.s.enmEffOpSize)
9193 {
9194 case IEMMODE_16BIT:
9195 IEM_MC_BEGIN(4, 0);
9196 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9197 IEM_MC_ARG(uint16_t, u16Src, 1);
9198 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9199 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9200
9201 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9202 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9203 IEM_MC_REF_EFLAGS(pEFlags);
9204 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9205
9206 IEM_MC_ADVANCE_RIP_AND_FINISH();
9207 IEM_MC_END();
9208 break;
9209
9210 case IEMMODE_32BIT:
9211 IEM_MC_BEGIN(4, 0);
9212 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9213 IEM_MC_ARG(uint32_t, u32Src, 1);
9214 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9215 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9216
9217 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9218 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9219 IEM_MC_REF_EFLAGS(pEFlags);
9220 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9221
9222 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9223 IEM_MC_ADVANCE_RIP_AND_FINISH();
9224 IEM_MC_END();
9225 break;
9226
9227 case IEMMODE_64BIT:
9228 IEM_MC_BEGIN(4, 0);
9229 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9230 IEM_MC_ARG(uint64_t, u64Src, 1);
9231 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9232 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9233
9234 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9235 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9236 IEM_MC_REF_EFLAGS(pEFlags);
9237 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9238
9239 IEM_MC_ADVANCE_RIP_AND_FINISH();
9240 IEM_MC_END();
9241 break;
9242
9243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9244 }
9245 }
9246 else
9247 {
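 /* Note: the shift count immediate trails the ModR/M bytes, so the EA
 calculation must reserve one immediate byte (the last argument to
 IEM_MC_CALC_RM_EFF_ADDR) for RIP-relative addressing to come out
 right. */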
9248 switch (pVCpu->iem.s.enmEffOpSize)
9249 {
9250 case IEMMODE_16BIT:
9251 IEM_MC_BEGIN(4, 2);
9252 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9253 IEM_MC_ARG(uint16_t, u16Src, 1);
9254 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9255 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9257
9258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9259 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9260 IEM_MC_ASSIGN(cShiftArg, cShift);
9261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9262 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9263 IEM_MC_FETCH_EFLAGS(EFlags);
9264 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9265 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9266
9267 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9268 IEM_MC_COMMIT_EFLAGS(EFlags);
9269 IEM_MC_ADVANCE_RIP_AND_FINISH();
9270 IEM_MC_END();
9271 break;
9272
9273 case IEMMODE_32BIT:
9274 IEM_MC_BEGIN(4, 2);
9275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9276 IEM_MC_ARG(uint32_t, u32Src, 1);
9277 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9280
9281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9282 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9283 IEM_MC_ASSIGN(cShiftArg, cShift);
9284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9285 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9286 IEM_MC_FETCH_EFLAGS(EFlags);
9287 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9288 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9289
9290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9291 IEM_MC_COMMIT_EFLAGS(EFlags);
9292 IEM_MC_ADVANCE_RIP_AND_FINISH();
9293 IEM_MC_END();
9294 break;
9295
9296 case IEMMODE_64BIT:
9297 IEM_MC_BEGIN(4, 2);
9298 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9299 IEM_MC_ARG(uint64_t, u64Src, 1);
9300 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9301 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9303
9304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9305 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9306 IEM_MC_ASSIGN(cShiftArg, cShift);
9307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9308 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9309 IEM_MC_FETCH_EFLAGS(EFlags);
9310 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9311 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9312
9313 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9314 IEM_MC_COMMIT_EFLAGS(EFlags);
9315 IEM_MC_ADVANCE_RIP_AND_FINISH();
9316 IEM_MC_END();
9317 break;
9318
9319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9320 }
9321 }
9322}
9323
9324
9325/**
9326 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9327 */
9328FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9329{
9330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9332
9333 if (IEM_IS_MODRM_REG_MODE(bRm))
9334 {
9335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9336
9337 switch (pVCpu->iem.s.enmEffOpSize)
9338 {
9339 case IEMMODE_16BIT:
9340 IEM_MC_BEGIN(4, 0);
9341 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9342 IEM_MC_ARG(uint16_t, u16Src, 1);
9343 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9344 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9345
9346 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9347 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9348 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9349 IEM_MC_REF_EFLAGS(pEFlags);
9350 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9351
9352 IEM_MC_ADVANCE_RIP_AND_FINISH();
9353 IEM_MC_END();
9354 break;
9355
9356 case IEMMODE_32BIT:
9357 IEM_MC_BEGIN(4, 0);
9358 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9359 IEM_MC_ARG(uint32_t, u32Src, 1);
9360 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9361 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9362
9363 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9364 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9365 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9366 IEM_MC_REF_EFLAGS(pEFlags);
9367 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9368
9369 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9370 IEM_MC_ADVANCE_RIP_AND_FINISH();
9371 IEM_MC_END();
9372 break;
9373
9374 case IEMMODE_64BIT:
9375 IEM_MC_BEGIN(4, 0);
9376 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9377 IEM_MC_ARG(uint64_t, u64Src, 1);
9378 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9379 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9380
9381 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9382 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9383 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9384 IEM_MC_REF_EFLAGS(pEFlags);
9385 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9386
9387 IEM_MC_ADVANCE_RIP_AND_FINISH();
9388 IEM_MC_END();
9389 break;
9390
9391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9392 }
9393 }
9394 else
9395 {
9396 switch (pVCpu->iem.s.enmEffOpSize)
9397 {
9398 case IEMMODE_16BIT:
9399 IEM_MC_BEGIN(4, 2);
9400 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9401 IEM_MC_ARG(uint16_t, u16Src, 1);
9402 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9403 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9405
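 /* No immediate here (the shift count comes from CL), so the EA
 calculation reserves no extra immediate bytes. */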
9406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9408 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9409 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9410 IEM_MC_FETCH_EFLAGS(EFlags);
9411 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9412 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9413
9414 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9415 IEM_MC_COMMIT_EFLAGS(EFlags);
9416 IEM_MC_ADVANCE_RIP_AND_FINISH();
9417 IEM_MC_END();
9418 break;
9419
9420 case IEMMODE_32BIT:
9421 IEM_MC_BEGIN(4, 2);
9422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9423 IEM_MC_ARG(uint32_t, u32Src, 1);
9424 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9425 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9427
9428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9430 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9431 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9432 IEM_MC_FETCH_EFLAGS(EFlags);
9433 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9434 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9435
9436 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9437 IEM_MC_COMMIT_EFLAGS(EFlags);
9438 IEM_MC_ADVANCE_RIP_AND_FINISH();
9439 IEM_MC_END();
9440 break;
9441
9442 case IEMMODE_64BIT:
9443 IEM_MC_BEGIN(4, 2);
9444 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9445 IEM_MC_ARG(uint64_t, u64Src, 1);
9446 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9447 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9449
9450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9452 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9453 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9454 IEM_MC_FETCH_EFLAGS(EFlags);
9455 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9456 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9457
9458 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9459 IEM_MC_COMMIT_EFLAGS(EFlags);
9460 IEM_MC_ADVANCE_RIP_AND_FINISH();
9461 IEM_MC_END();
9462 break;
9463
9464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9465 }
9466 }
9467}
9468
9469
9470
9471/** Opcode 0x0f 0xa4. */
9472FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9473{
9474 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9475 IEMOP_HLP_MIN_386();
9476 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9477}
9478
9479
9480/** Opcode 0x0f 0xa5. */
9481FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9482{
9483 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9484 IEMOP_HLP_MIN_386();
9485 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9486}
9487
9488
9489/** Opcode 0x0f 0xa8. */
9490FNIEMOP_DEF(iemOp_push_gs)
9491{
9492 IEMOP_MNEMONIC(push_gs, "push gs");
9493 IEMOP_HLP_MIN_386();
9494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9495 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9496}
9497
9498
9499/** Opcode 0x0f 0xa9. */
9500FNIEMOP_DEF(iemOp_pop_gs)
9501{
9502 IEMOP_MNEMONIC(pop_gs, "pop gs");
9503 IEMOP_HLP_MIN_386();
9504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9505 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9506}
9507
9508
9509/** Opcode 0x0f 0xaa. */
9510FNIEMOP_DEF(iemOp_rsm)
9511{
9512 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9513 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9515 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9516}
9517
9518
9519
9520/** Opcode 0x0f 0xab. */
9521FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9522{
9523 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9524 IEMOP_HLP_MIN_386();
9525 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9526}
9527
9528
9529/** Opcode 0x0f 0xac. */
9530FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9531{
9532 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9533 IEMOP_HLP_MIN_386();
9534 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9535}
9536
9537
9538/** Opcode 0x0f 0xad. */
9539FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9540{
9541 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9542 IEMOP_HLP_MIN_386();
9543 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9544}
9545
9546
9547/** Opcode 0x0f 0xae mem/0. */
9548FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9549{
9550 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9551 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9552 return IEMOP_RAISE_INVALID_OPCODE();
9553
9554 IEM_MC_BEGIN(3, 1);
9555 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9556 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9557 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9560 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9561 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9562 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9563 IEM_MC_END();
9564 return VINF_SUCCESS;
9565}
9566
9567
9568/** Opcode 0x0f 0xae mem/1. */
9569FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9570{
9571 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9572 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9573 return IEMOP_RAISE_INVALID_OPCODE();
9574
9575 IEM_MC_BEGIN(3, 1);
9576 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9577 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9578 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9581 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9582 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9583 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9584 IEM_MC_END();
9585 return VINF_SUCCESS;
9586}
9587
9588
9589/**
9590 * @opmaps grp15
9591 * @opcode !11/2
9592 * @oppfx none
9593 * @opcpuid sse
9594 * @opgroup og_sse_mxcsrsm
9595 * @opxcpttype 5
9596 * @optest op1=0 -> mxcsr=0
9597 * @optest op1=0x2083 -> mxcsr=0x2083
9598 * @optest op1=0xfffffffe -> value.xcpt=0xd
9599 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9600 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9601 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9602 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9603 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9604 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9605 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9606 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9607 */
9608FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9609{
9610 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9611 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9612 return IEMOP_RAISE_INVALID_OPCODE();
9613
9614 IEM_MC_BEGIN(2, 0);
9615 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9616 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR. */
9620 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9621 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9622 IEM_MC_END();
9623 return VINF_SUCCESS;
9624}
9625
9626
9627/**
9628 * @opmaps grp15
9629 * @opcode !11/3
9630 * @oppfx none
9631 * @opcpuid sse
9632 * @opgroup og_sse_mxcsrsm
9633 * @opxcpttype 5
9634 * @optest mxcsr=0 -> op1=0
9635 * @optest mxcsr=0x2083 -> op1=0x2083
9636 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9637 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9638 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9639 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9640 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9641 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9642 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9643 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9644 */
9645FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9646{
9647 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9648 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9649 return IEMOP_RAISE_INVALID_OPCODE();
9650
9651 IEM_MC_BEGIN(2, 0);
9652 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9653 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9656 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9657 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9658 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9659 IEM_MC_END();
9660 return VINF_SUCCESS;
9661}
9662
9663
9664/**
9665 * @opmaps grp15
9666 * @opcode !11/4
9667 * @oppfx none
9668 * @opcpuid xsave
9669 * @opgroup og_system
9670 * @opxcpttype none
9671 */
9672FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9673{
9674 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9675 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9676 return IEMOP_RAISE_INVALID_OPCODE();
9677
9678 IEM_MC_BEGIN(3, 0);
9679 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9680 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9681 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9684 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9685 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9686 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9687 IEM_MC_END();
9688 return VINF_SUCCESS;
9689}
9690
9691
9692/**
9693 * @opmaps grp15
9694 * @opcode !11/5
9695 * @oppfx none
9696 * @opcpuid xsave
9697 * @opgroup og_system
9698 * @opxcpttype none
9699 */
9700FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9701{
9702 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9703 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9704 return IEMOP_RAISE_INVALID_OPCODE();
9705
9706 IEM_MC_BEGIN(3, 0);
9707 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9708 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9709 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads new state, same as fxrstor above. */
9713 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9714 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9715 IEM_MC_END();
9716 return VINF_SUCCESS;
9717}
9718
9719/** Opcode 0x0f 0xae mem/6. */
9720FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9721
9722/**
9723 * @opmaps grp15
9724 * @opcode !11/7
9725 * @oppfx none
9726 * @opcpuid clfsh
9727 * @opgroup og_cachectl
9728 * @optest op1=1 ->
9729 */
9730FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9731{
9732 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9733 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9734 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9735
9736 IEM_MC_BEGIN(2, 0);
9737 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9738 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9741 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9742 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9743 IEM_MC_END();
9744 return VINF_SUCCESS;
9745}
9746
9747/**
9748 * @opmaps grp15
9749 * @opcode !11/7
9750 * @oppfx 0x66
9751 * @opcpuid clflushopt
9752 * @opgroup og_cachectl
9753 * @optest op1=1 ->
9754 */
9755FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9756{
9757 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9758 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9759 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9760
9761 IEM_MC_BEGIN(2, 0);
9762 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9763 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9766 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9767 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9768 IEM_MC_END();
9769 return VINF_SUCCESS;
9770}
9771
9772
9773/** Opcode 0x0f 0xae 11b/5. */
9774FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9775{
9776 RT_NOREF_PV(bRm);
9777 IEMOP_MNEMONIC(lfence, "lfence");
9778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9780 return IEMOP_RAISE_INVALID_OPCODE();
9781
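 /* The native lfence helper requires SSE2 on x86 hosts; without it an
 alternative memory fence implementation is used. On ARM64 the check
 is compiled out and the native helper is always called. */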
9782 IEM_MC_BEGIN(0, 0);
9783#ifndef RT_ARCH_ARM64
9784 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9785#endif
9786 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9787#ifndef RT_ARCH_ARM64
9788 else
9789 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9790#endif
9791 IEM_MC_ADVANCE_RIP_AND_FINISH();
9792 IEM_MC_END();
9793}
9794
9795
9796/** Opcode 0x0f 0xae 11b/6. */
9797FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9798{
9799 RT_NOREF_PV(bRm);
9800 IEMOP_MNEMONIC(mfence, "mfence");
9801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9802 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9803 return IEMOP_RAISE_INVALID_OPCODE();
9804
9805 IEM_MC_BEGIN(0, 0);
9806#ifndef RT_ARCH_ARM64
9807 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9808#endif
9809 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9810#ifndef RT_ARCH_ARM64
9811 else
9812 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9813#endif
9814 IEM_MC_ADVANCE_RIP_AND_FINISH();
9815 IEM_MC_END();
9816}
9817
9818
9819/** Opcode 0x0f 0xae 11b/7. */
9820FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9821{
9822 RT_NOREF_PV(bRm);
9823 IEMOP_MNEMONIC(sfence, "sfence");
9824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9825 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9826 return IEMOP_RAISE_INVALID_OPCODE();
9827
9828 IEM_MC_BEGIN(0, 0);
9829#ifndef RT_ARCH_ARM64
9830 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9831#endif
9832 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9833#ifndef RT_ARCH_ARM64
9834 else
9835 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9836#endif
9837 IEM_MC_ADVANCE_RIP_AND_FINISH();
9838 IEM_MC_END();
9839}
9840
9841
9842/** Opcode 0xf3 0x0f 0xae 11b/0. */
9843FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9844{
9845 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
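 /* A 64-bit operand size (REX.W) reads the full FS base; otherwise the
 low 32 bits are fetched and the 32-bit store zero-extends them into
 the destination register. */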
9847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9848 {
9849 IEM_MC_BEGIN(1, 0);
9850 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9851 IEM_MC_ARG(uint64_t, u64Dst, 0);
9852 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9853 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9854 IEM_MC_ADVANCE_RIP_AND_FINISH();
9855 IEM_MC_END();
9856 }
9857 else
9858 {
9859 IEM_MC_BEGIN(1, 0);
9860 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9861 IEM_MC_ARG(uint32_t, u32Dst, 0);
9862 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9863 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9864 IEM_MC_ADVANCE_RIP_AND_FINISH();
9865 IEM_MC_END();
9866 }
9867}
9868
9869
9870/** Opcode 0xf3 0x0f 0xae 11b/1. */
9871FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9872{
9873 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9875 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9876 {
9877 IEM_MC_BEGIN(1, 0);
9878 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9879 IEM_MC_ARG(uint64_t, u64Dst, 0);
9880 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9881 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9882 IEM_MC_ADVANCE_RIP_AND_FINISH();
9883 IEM_MC_END();
9884 }
9885 else
9886 {
9887 IEM_MC_BEGIN(1, 0);
9888 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9889 IEM_MC_ARG(uint32_t, u32Dst, 0);
9890 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9891 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9892 IEM_MC_ADVANCE_RIP_AND_FINISH();
9893 IEM_MC_END();
9894 }
9895}
9896
9897
9898/** Opcode 0xf3 0x0f 0xae 11b/2. */
9899FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9900{
9901 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
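 /* A 64-bit base being written must be canonical or #GP(0) is raised;
 the 32-bit variant zero-extends and is therefore always canonical. */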
9903 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9904 {
9905 IEM_MC_BEGIN(1, 0);
9906 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9907 IEM_MC_ARG(uint64_t, u64Dst, 0);
9908 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9909 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9910 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9911 IEM_MC_ADVANCE_RIP_AND_FINISH();
9912 IEM_MC_END();
9913 }
9914 else
9915 {
9916 IEM_MC_BEGIN(1, 0);
9917 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9918 IEM_MC_ARG(uint32_t, u32Dst, 0);
9919 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9920 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9921 IEM_MC_ADVANCE_RIP_AND_FINISH();
9922 IEM_MC_END();
9923 }
9924}
9925
9926
9927/** Opcode 0xf3 0x0f 0xae 11b/3. */
9928FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9929{
9930 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9932 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9933 {
9934 IEM_MC_BEGIN(1, 0);
9935 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9936 IEM_MC_ARG(uint64_t, u64Dst, 0);
9937 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9938 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9939 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9940 IEM_MC_ADVANCE_RIP_AND_FINISH();
9941 IEM_MC_END();
9942 }
9943 else
9944 {
9945 IEM_MC_BEGIN(1, 0);
9946 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9947 IEM_MC_ARG(uint32_t, u32Dst, 0);
9948 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9949 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9950 IEM_MC_ADVANCE_RIP_AND_FINISH();
9951 IEM_MC_END();
9952 }
9953}
9954
9955
9956/**
9957 * Group 15 jump table for register variant.
9958 */
9959IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9960{ /* pfx: none, 066h, 0f3h, 0f2h */
9961 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9962 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9963 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9964 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9965 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9966 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9967 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9968 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9969};
9970AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9971
9972
9973/**
9974 * Group 15 jump table for memory variant.
9975 */
9976IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9977{ /* pfx: none, 066h, 0f3h, 0f2h */
9978 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9979 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9980 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9981 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9982 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9983 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9984 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9985 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9986};
9987AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9988
9989
9990/** Opcode 0x0f 0xae. */
9991FNIEMOP_DEF(iemOp_Grp15)
9992{
9993 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
9994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
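 /* Both tables are indexed by the ModR/M reg field times four plus the
 mandatory prefix index (none, 0x66, 0xf3, 0xf2). */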
9995 if (IEM_IS_MODRM_REG_MODE(bRm))
9996 /* register, register */
9997 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9998 + pVCpu->iem.s.idxPrefix], bRm);
9999 /* memory, register */
10000 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10001 + pVCpu->iem.s.idxPrefix], bRm);
10002}
10003
10004
10005/** Opcode 0x0f 0xaf. */
10006FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10007{
10008 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10009 IEMOP_HLP_MIN_386();
10010 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10011 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10012}
10013
10014
10015/** Opcode 0x0f 0xb0. */
10016FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10017{
10018 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10019 IEMOP_HLP_MIN_486();
10020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10021
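 /* CMPXCHG compares AL with the destination: if equal, ZF is set and the
 source is stored in the destination; otherwise ZF is cleared and the
 helper loads the destination value into *pu8Al, which is why the
 memory variant writes u8Al back to AL unconditionally below. */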
10022 if (IEM_IS_MODRM_REG_MODE(bRm))
10023 {
10024 IEMOP_HLP_DONE_DECODING();
10025 IEM_MC_BEGIN(4, 0);
10026 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10027 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10028 IEM_MC_ARG(uint8_t, u8Src, 2);
10029 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10030
10031 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10032 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10033 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10034 IEM_MC_REF_EFLAGS(pEFlags);
10035 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10036 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10037 else
10038 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10039
10040 IEM_MC_ADVANCE_RIP_AND_FINISH();
10041 IEM_MC_END();
10042 }
10043 else
10044 {
10045 IEM_MC_BEGIN(4, 3);
10046 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10047 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10048 IEM_MC_ARG(uint8_t, u8Src, 2);
10049 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10051 IEM_MC_LOCAL(uint8_t, u8Al);
10052
10053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10054 IEMOP_HLP_DONE_DECODING();
10055 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10056 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10057 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10058 IEM_MC_FETCH_EFLAGS(EFlags);
10059 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10061 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10062 else
10063 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10064
10065 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10066 IEM_MC_COMMIT_EFLAGS(EFlags);
10067 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10068 IEM_MC_ADVANCE_RIP_AND_FINISH();
10069 IEM_MC_END();
10070 }
10071}
10072
10073/** Opcode 0x0f 0xb1. */
10074FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10075{
10076 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10077 IEMOP_HLP_MIN_486();
10078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10079
10080 if (IEM_IS_MODRM_REG_MODE(bRm))
10081 {
10082 IEMOP_HLP_DONE_DECODING();
10083 switch (pVCpu->iem.s.enmEffOpSize)
10084 {
10085 case IEMMODE_16BIT:
10086 IEM_MC_BEGIN(4, 0);
10087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10088 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10089 IEM_MC_ARG(uint16_t, u16Src, 2);
10090 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10091
10092 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10093 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10094 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10095 IEM_MC_REF_EFLAGS(pEFlags);
10096 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10097 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10098 else
10099 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10100
10101 IEM_MC_ADVANCE_RIP_AND_FINISH();
10102 IEM_MC_END();
10103 break;
10104
10105 case IEMMODE_32BIT:
10106 IEM_MC_BEGIN(4, 0);
10107 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10108 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10109 IEM_MC_ARG(uint32_t, u32Src, 2);
10110 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10111
10112 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10113 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10114 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10115 IEM_MC_REF_EFLAGS(pEFlags);
10116 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10117 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10118 else
10119 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10120
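 /* A 32-bit register write clears the high half in 64-bit mode, but only
 for the register that is actually written: the destination on success
 (ZF=1), or EAX on failure. */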
10121 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10122 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10123 } IEM_MC_ELSE() {
10124 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10125 } IEM_MC_ENDIF();
10126
10127 IEM_MC_ADVANCE_RIP_AND_FINISH();
10128 IEM_MC_END();
10129 break;
10130
10131 case IEMMODE_64BIT:
10132 IEM_MC_BEGIN(4, 0);
10133 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10134 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
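 /* On 32-bit (x86) hosts the 64-bit source operand is passed to the
 helper by reference rather than by value. */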
10135#ifdef RT_ARCH_X86
10136 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10137#else
10138 IEM_MC_ARG(uint64_t, u64Src, 2);
10139#endif
10140 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10141
10142 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10143 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10144 IEM_MC_REF_EFLAGS(pEFlags);
10145#ifdef RT_ARCH_X86
10146 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10147 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10148 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10149 else
10150 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10151#else
10152 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10153 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10154 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10155 else
10156 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10157#endif
10158
10159 IEM_MC_ADVANCE_RIP_AND_FINISH();
10160 IEM_MC_END();
10161 break;
10162
10163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10164 }
10165 }
10166 else
10167 {
10168 switch (pVCpu->iem.s.enmEffOpSize)
10169 {
10170 case IEMMODE_16BIT:
10171 IEM_MC_BEGIN(4, 3);
10172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10173 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10174 IEM_MC_ARG(uint16_t, u16Src, 2);
10175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10177 IEM_MC_LOCAL(uint16_t, u16Ax);
10178
10179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10180 IEMOP_HLP_DONE_DECODING();
10181 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10182 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10183 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10184 IEM_MC_FETCH_EFLAGS(EFlags);
10185 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10186 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10187 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10188 else
10189 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10190
10191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10192 IEM_MC_COMMIT_EFLAGS(EFlags);
10193 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10194 IEM_MC_ADVANCE_RIP_AND_FINISH();
10195 IEM_MC_END();
10196 break;
10197
10198 case IEMMODE_32BIT:
10199 IEM_MC_BEGIN(4, 3);
10200 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10201 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10202 IEM_MC_ARG(uint32_t, u32Src, 2);
10203 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10205 IEM_MC_LOCAL(uint32_t, u32Eax);
10206
10207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10208 IEMOP_HLP_DONE_DECODING();
10209 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10210 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10211 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10212 IEM_MC_FETCH_EFLAGS(EFlags);
10213 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10214 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10215 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10216 else
10217 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10218
10219 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10220 IEM_MC_COMMIT_EFLAGS(EFlags);
10221
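 /* Only write EAX back when the compare failed (ZF=0); an unconditional
 32-bit write would clear the high half of RAX even on success. */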
10222 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10223 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10224 IEM_MC_ENDIF();
10225
10226 IEM_MC_ADVANCE_RIP_AND_FINISH();
10227 IEM_MC_END();
10228 break;
10229
10230 case IEMMODE_64BIT:
10231 IEM_MC_BEGIN(4, 3);
10232 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10233 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10234#ifdef RT_ARCH_X86
10235 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10236#else
10237 IEM_MC_ARG(uint64_t, u64Src, 2);
10238#endif
10239 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10241 IEM_MC_LOCAL(uint64_t, u64Rax);
10242
10243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10244 IEMOP_HLP_DONE_DECODING();
10245 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10246 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10247 IEM_MC_FETCH_EFLAGS(EFlags);
10248 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10249#ifdef RT_ARCH_X86
10250 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10251 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10252 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10253 else
10254 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10255#else
10256 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10257 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10258 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10259 else
10260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10261#endif
10262
10263 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10264 IEM_MC_COMMIT_EFLAGS(EFlags);
10265 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10266 IEM_MC_ADVANCE_RIP_AND_FINISH();
10267 IEM_MC_END();
10268 break;
10269
10270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10271 }
10272 }
10273}
10274
10275
10276FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10277{
10278 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10279 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10280
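 /* The far pointer is laid out offset first in memory, with the 16-bit
 selector following at +2, +4 or +8 depending on the operand size. */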
10281 switch (pVCpu->iem.s.enmEffOpSize)
10282 {
10283 case IEMMODE_16BIT:
10284 IEM_MC_BEGIN(5, 1);
10285 IEM_MC_ARG(uint16_t, uSel, 0);
10286 IEM_MC_ARG(uint16_t, offSeg, 1);
10287 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10288 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10289 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10290 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10293 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10294 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10295 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10296 IEM_MC_END();
10297 return VINF_SUCCESS;
10298
10299 case IEMMODE_32BIT:
10300 IEM_MC_BEGIN(5, 1);
10301 IEM_MC_ARG(uint16_t, uSel, 0);
10302 IEM_MC_ARG(uint32_t, offSeg, 1);
10303 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10304 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10305 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10306 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10309 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10310 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10311 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10312 IEM_MC_END();
10313 return VINF_SUCCESS;
10314
10315 case IEMMODE_64BIT:
10316 IEM_MC_BEGIN(5, 1);
10317 IEM_MC_ARG(uint16_t, uSel, 0);
10318 IEM_MC_ARG(uint64_t, offSeg, 1);
10319 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10320 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10321 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10322 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10325 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10326 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10327 else
10328 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10329 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10330 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10331 IEM_MC_END();
10332 return VINF_SUCCESS;
10333
10334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10335 }
10336}
10337
10338
10339/** Opcode 0x0f 0xb2. */
10340FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10341{
10342 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10343 IEMOP_HLP_MIN_386();
10344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10345 if (IEM_IS_MODRM_REG_MODE(bRm))
10346 return IEMOP_RAISE_INVALID_OPCODE();
10347 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10348}
10349
10350
10351/** Opcode 0x0f 0xb3. */
10352FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10353{
10354 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10355 IEMOP_HLP_MIN_386();
10356 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10357}
10358
10359
10360/** Opcode 0x0f 0xb4. */
10361FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10362{
10363 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10364 IEMOP_HLP_MIN_386();
10365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10366 if (IEM_IS_MODRM_REG_MODE(bRm))
10367 return IEMOP_RAISE_INVALID_OPCODE();
10368 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10369}
10370
10371
10372/** Opcode 0x0f 0xb5. */
10373FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10374{
10375 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10376 IEMOP_HLP_MIN_386();
10377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10378 if (IEM_IS_MODRM_REG_MODE(bRm))
10379 return IEMOP_RAISE_INVALID_OPCODE();
10380 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10381}
10382
10383
10384/** Opcode 0x0f 0xb6. */
10385FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10386{
10387 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10388 IEMOP_HLP_MIN_386();
10389
10390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10391
10392 /*
10393 * If rm is denoting a register, no more instruction bytes.
10394 */
10395 if (IEM_IS_MODRM_REG_MODE(bRm))
10396 {
10397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10398 switch (pVCpu->iem.s.enmEffOpSize)
10399 {
10400 case IEMMODE_16BIT:
10401 IEM_MC_BEGIN(0, 1);
10402 IEM_MC_LOCAL(uint16_t, u16Value);
10403 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10404 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10405 IEM_MC_ADVANCE_RIP_AND_FINISH();
10406 IEM_MC_END();
10407 break;
10408
10409 case IEMMODE_32BIT:
10410 IEM_MC_BEGIN(0, 1);
10411 IEM_MC_LOCAL(uint32_t, u32Value);
10412 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10413 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10414 IEM_MC_ADVANCE_RIP_AND_FINISH();
10415 IEM_MC_END();
10416 break;
10417
10418 case IEMMODE_64BIT:
10419 IEM_MC_BEGIN(0, 1);
10420 IEM_MC_LOCAL(uint64_t, u64Value);
10421 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10422 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10423 IEM_MC_ADVANCE_RIP_AND_FINISH();
10424 IEM_MC_END();
10425 break;
10426
10427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10428 }
10429 }
10430 else
10431 {
10432 /*
10433 * We're loading a register from memory.
10434 */
10435 switch (pVCpu->iem.s.enmEffOpSize)
10436 {
10437 case IEMMODE_16BIT:
10438 IEM_MC_BEGIN(0, 2);
10439 IEM_MC_LOCAL(uint16_t, u16Value);
10440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10443 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10444 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10445 IEM_MC_ADVANCE_RIP_AND_FINISH();
10446 IEM_MC_END();
10447 break;
10448
10449 case IEMMODE_32BIT:
10450 IEM_MC_BEGIN(0, 2);
10451 IEM_MC_LOCAL(uint32_t, u32Value);
10452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10455 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10456 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10457 IEM_MC_ADVANCE_RIP_AND_FINISH();
10458 IEM_MC_END();
10459 break;
10460
10461 case IEMMODE_64BIT:
10462 IEM_MC_BEGIN(0, 2);
10463 IEM_MC_LOCAL(uint64_t, u64Value);
10464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10467 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10468 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10469 IEM_MC_ADVANCE_RIP_AND_FINISH();
10470 IEM_MC_END();
10471 break;
10472
10473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10474 }
10475 }
10476}
10477
10478
10479/** Opcode 0x0f 0xb7. */
10480FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10481{
10482 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10483 IEMOP_HLP_MIN_386();
10484
10485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10486
10487 /** @todo Not entirely sure how the operand size prefix is handled here;
10488 * assuming that it will be ignored. It would be nice to have a few
10489 * tests for this. */
10490 /*
10491 * If rm is denoting a register, no more instruction bytes.
10492 */
10493 if (IEM_IS_MODRM_REG_MODE(bRm))
10494 {
10495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10496 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10497 {
10498 IEM_MC_BEGIN(0, 1);
10499 IEM_MC_LOCAL(uint32_t, u32Value);
10500 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10501 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10502 IEM_MC_ADVANCE_RIP_AND_FINISH();
10503 IEM_MC_END();
10504 }
10505 else
10506 {
10507 IEM_MC_BEGIN(0, 1);
10508 IEM_MC_LOCAL(uint64_t, u64Value);
10509 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10510 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10511 IEM_MC_ADVANCE_RIP_AND_FINISH();
10512 IEM_MC_END();
10513 }
10514 }
10515 else
10516 {
10517 /*
10518 * We're loading a register from memory.
10519 */
10520 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10521 {
10522 IEM_MC_BEGIN(0, 2);
10523 IEM_MC_LOCAL(uint32_t, u32Value);
10524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10527 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10528 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530 IEM_MC_END();
10531 }
10532 else
10533 {
10534 IEM_MC_BEGIN(0, 2);
10535 IEM_MC_LOCAL(uint64_t, u64Value);
10536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10539 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10540 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10541 IEM_MC_ADVANCE_RIP_AND_FINISH();
10542 IEM_MC_END();
10543 }
10544 }
10545}
10546
10547
10548/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10549FNIEMOP_UD_STUB(iemOp_jmpe);
10550
10551
10552/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10553FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10554{
10555 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10556 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10557 return iemOp_InvalidNeedRM(pVCpu);
10558#ifndef TST_IEM_CHECK_MC
10559# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10560 static const IEMOPBINSIZES s_Native =
10561 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10562# endif
10563 static const IEMOPBINSIZES s_Fallback =
10564 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10565#endif
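 /* Use the host's POPCNT instruction when available, otherwise the
 portable fallback implementation. */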
10566 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10567}
10568
10569
10570/**
10571 * @opcode 0xb9
10572 * @opinvalid intel-modrm
10573 * @optest ->
10574 */
10575FNIEMOP_DEF(iemOp_Grp10)
10576{
10577 /*
10578 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10579 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10580 */
10581 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10582 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10583 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10584}
10585
10586
10587/** Opcode 0x0f 0xba. */
10588FNIEMOP_DEF(iemOp_Grp8)
10589{
10590 IEMOP_HLP_MIN_386();
10591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10592 PCIEMOPBINSIZES pImpl;
10593 switch (IEM_GET_MODRM_REG_8(bRm))
10594 {
10595 case 0: case 1: case 2: case 3:
10596 /* Both AMD and Intel want full modr/m decoding and imm8. */
10597 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10598 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10599 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10600 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10601 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10603 }
10604 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10605
10606 if (IEM_IS_MODRM_REG_MODE(bRm))
10607 {
10608 /* register destination. */
10609 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10611
10612 switch (pVCpu->iem.s.enmEffOpSize)
10613 {
10614 case IEMMODE_16BIT:
10615 IEM_MC_BEGIN(3, 0);
10616 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10617 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10618 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10619
10620 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10621 IEM_MC_REF_EFLAGS(pEFlags);
10622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10623
10624 IEM_MC_ADVANCE_RIP_AND_FINISH();
10625 IEM_MC_END();
10626 break;
10627
10628 case IEMMODE_32BIT:
10629 IEM_MC_BEGIN(3, 0);
10630 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10631 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10632 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10633
10634 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10635 IEM_MC_REF_EFLAGS(pEFlags);
10636 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10637
10638 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10639 IEM_MC_ADVANCE_RIP_AND_FINISH();
10640 IEM_MC_END();
10641 break;
10642
10643 case IEMMODE_64BIT:
10644 IEM_MC_BEGIN(3, 0);
10645 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10646 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10647 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10648
10649 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10650 IEM_MC_REF_EFLAGS(pEFlags);
10651 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10652
10653 IEM_MC_ADVANCE_RIP_AND_FINISH();
10654 IEM_MC_END();
10655 break;
10656
10657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10658 }
10659 }
10660 else
10661 {
10662 /* memory destination. */
10663
10664 uint32_t fAccess;
10665 if (pImpl->pfnLockedU16)
10666 fAccess = IEM_ACCESS_DATA_RW;
10667 else /* BT */
10668 fAccess = IEM_ACCESS_DATA_R;
10669
10670 /** @todo test negative bit offsets! */
10671 switch (pVCpu->iem.s.enmEffOpSize)
10672 {
10673 case IEMMODE_16BIT:
10674 IEM_MC_BEGIN(3, 1);
10675 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10676 IEM_MC_ARG(uint16_t, u16Src, 1);
10677 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10679
10680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10681 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10682 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10683 if (pImpl->pfnLockedU16)
10684 IEMOP_HLP_DONE_DECODING();
10685 else
10686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10687 IEM_MC_FETCH_EFLAGS(EFlags);
10688 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10689 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10691 else
10692 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10693 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10694
10695 IEM_MC_COMMIT_EFLAGS(EFlags);
10696 IEM_MC_ADVANCE_RIP_AND_FINISH();
10697 IEM_MC_END();
10698 break;
10699
10700 case IEMMODE_32BIT:
10701 IEM_MC_BEGIN(3, 1);
10702 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10703 IEM_MC_ARG(uint32_t, u32Src, 1);
10704 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10706
10707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10708 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10709 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10710 if (pImpl->pfnLockedU16)
10711 IEMOP_HLP_DONE_DECODING();
10712 else
10713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10714 IEM_MC_FETCH_EFLAGS(EFlags);
10715 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10716 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10717 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10718 else
10719 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10721
10722 IEM_MC_COMMIT_EFLAGS(EFlags);
10723 IEM_MC_ADVANCE_RIP_AND_FINISH();
10724 IEM_MC_END();
10725 break;
10726
10727 case IEMMODE_64BIT:
10728 IEM_MC_BEGIN(3, 1);
10729 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10730 IEM_MC_ARG(uint64_t, u64Src, 1);
10731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10733
10734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10735 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10736 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10737 if (pImpl->pfnLockedU16)
10738 IEMOP_HLP_DONE_DECODING();
10739 else
10740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10741 IEM_MC_FETCH_EFLAGS(EFlags);
10742 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10743 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10745 else
10746 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10748
10749 IEM_MC_COMMIT_EFLAGS(EFlags);
10750 IEM_MC_ADVANCE_RIP_AND_FINISH();
10751 IEM_MC_END();
10752 break;
10753
10754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10755 }
10756 }
10757}
10758
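/*
 * Illustrative sketch (not compiled): a rough model of the immediate masking
 * done by the Grp8 decoder above, i.e. why u8Bit is ANDed with 0x0f/0x1f/0x3f.
 * With an imm8 bit offset the operand itself is the whole bit string, so the
 * offset is taken modulo the operand width; only the Gv forms can index
 * outside the addressed unit.  The helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
# include <stdbool.h>
/* bt r/m32, imm8: returns the selected bit, which the instruction puts in CF. */
static bool sketchBt32(uint32_t uDst, uint8_t bImm)
{
    return (uDst >> (bImm & 0x1f)) & 1;
}
#endif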
10759
10760/** Opcode 0x0f 0xbb. */
10761FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10762{
10763 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10764 IEMOP_HLP_MIN_386();
10765 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10766}
10767
10768
10769/**
10770 * Common worker for BSF and BSR instructions.
10771 *
10772 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10773 * the destination register, which means that for 32-bit operations the high
10774 * bits must be left alone.
10775 *
10776 * @param pImpl Pointer to the instruction implementation (assembly).
10777 */
10778FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10779{
10780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10781
10782 /*
10783 * If rm is denoting a register, no more instruction bytes.
10784 */
10785 if (IEM_IS_MODRM_REG_MODE(bRm))
10786 {
10787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10788 switch (pVCpu->iem.s.enmEffOpSize)
10789 {
10790 case IEMMODE_16BIT:
10791 IEM_MC_BEGIN(3, 0);
10792 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10793 IEM_MC_ARG(uint16_t, u16Src, 1);
10794 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10795
10796 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10797 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10798 IEM_MC_REF_EFLAGS(pEFlags);
10799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10800
10801 IEM_MC_ADVANCE_RIP_AND_FINISH();
10802 IEM_MC_END();
10803 break;
10804
10805 case IEMMODE_32BIT:
10806 IEM_MC_BEGIN(3, 0);
10807 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10808 IEM_MC_ARG(uint32_t, u32Src, 1);
10809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10810
10811 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10812 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10813 IEM_MC_REF_EFLAGS(pEFlags);
10814 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10815 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10816 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10817 IEM_MC_ENDIF();
10818 IEM_MC_ADVANCE_RIP_AND_FINISH();
10819 IEM_MC_END();
10820 break;
10821
10822 case IEMMODE_64BIT:
10823 IEM_MC_BEGIN(3, 0);
10824 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10825 IEM_MC_ARG(uint64_t, u64Src, 1);
10826 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10827
10828 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10829 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10830 IEM_MC_REF_EFLAGS(pEFlags);
10831 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10832
10833 IEM_MC_ADVANCE_RIP_AND_FINISH();
10834 IEM_MC_END();
10835 break;
10836
10837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10838 }
10839 }
10840 else
10841 {
10842 /*
10843 * We're accessing memory.
10844 */
10845 switch (pVCpu->iem.s.enmEffOpSize)
10846 {
10847 case IEMMODE_16BIT:
10848 IEM_MC_BEGIN(3, 1);
10849 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10850 IEM_MC_ARG(uint16_t, u16Src, 1);
10851 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10853
10854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10856 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10857 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10858 IEM_MC_REF_EFLAGS(pEFlags);
10859 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10860
10861 IEM_MC_ADVANCE_RIP_AND_FINISH();
10862 IEM_MC_END();
10863 break;
10864
10865 case IEMMODE_32BIT:
10866 IEM_MC_BEGIN(3, 1);
10867 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10868 IEM_MC_ARG(uint32_t, u32Src, 1);
10869 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10871
10872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10874 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10875 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10876 IEM_MC_REF_EFLAGS(pEFlags);
10877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10878
10879 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10880 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10881 IEM_MC_ENDIF();
10882 IEM_MC_ADVANCE_RIP_AND_FINISH();
10883 IEM_MC_END();
10884 break;
10885
10886 case IEMMODE_64BIT:
10887 IEM_MC_BEGIN(3, 1);
10888 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10889 IEM_MC_ARG(uint64_t, u64Src, 1);
10890 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10892
10893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10895 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10896 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10897 IEM_MC_REF_EFLAGS(pEFlags);
10898 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10899
10900 IEM_MC_ADVANCE_RIP_AND_FINISH();
10901 IEM_MC_END();
10902 break;
10903
10904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10905 }
10906 }
10907}
10908
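/*
 * Illustrative sketch (not compiled): models the behaviour the worker above
 * assumes for a zero source operand - ZF is set and the destination register
 * is left untouched (the architecturally "undefined" case, which both Intel
 * and AMD are generally observed to implement this way).  That is why the
 * 32-bit path only clears the high dword when ZF ends up clear.  The helper
 * name is made up for illustration.
 */
#if 0
# include <stdint.h>
static void sketchBsf32(uint64_t *puDst, uint32_t uSrc, uint32_t *pfZf)
{
    if (!uSrc)
        *pfZf = 1;                  /* destination, incl. bits 63:32, unchanged */
    else
    {
        unsigned iBit = 0;
        while (!(uSrc & 1)) { uSrc >>= 1; iBit++; }
        *puDst = iBit;              /* 32-bit result, zero-extended to 64 bits */
        *pfZf = 0;
    }
}
#endif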
10909
10910/** Opcode 0x0f 0xbc. */
10911FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10912{
10913 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10914 IEMOP_HLP_MIN_386();
10915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10916 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10917}
10918
10919
10920/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10921FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10922{
10923 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10924 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10925 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10926
10927#ifndef TST_IEM_CHECK_MC
10928 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10929 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10930 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10931 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10932 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10933 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10934 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10935 {
10936 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10937 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10938 };
10939#endif
10940 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10941 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10942 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10943}
10944
10945
10946/** Opcode 0x0f 0xbd. */
10947FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10948{
10949 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10950 IEMOP_HLP_MIN_386();
10951 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10952 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
10953}
10954
10955
10956/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
10957FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
10958{
10959 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10960 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
10961 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10962
10963#ifndef TST_IEM_CHECK_MC
10964 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
10965 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
10966 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
10967 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
10968 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
10969 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
10970 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
10971 {
10972 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
10973 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
10974 };
10975#endif
10976 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10977 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10978 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10979}
10980
10981
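/*
 * Illustrative sketch (not compiled): why tzcnt/lzcnt can go through the plain
 * binary-operator helper while bsf/bsr cannot - a zero source produces the
 * operand width and CF=1 instead of leaving the destination alone.  Note also
 * that without BMI1/LZCNT the F3 prefix is ignored and the bsf/bsr handlers
 * above are called instead.  The helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
static uint32_t sketchTzcnt32(uint32_t uSrc, uint32_t *pfCf)
{
    if (!uSrc)
    {
        *pfCf = 1;                  /* no bits set: result is the operand width */
        return 32;
    }
    uint32_t cZeros = 0;
    while (!(uSrc & 1)) { uSrc >>= 1; cZeros++; }
    *pfCf = 0;
    return cZeros;                  /* the real insn sets ZF iff the result is 0 */
}
#endif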
10982
10983/** Opcode 0x0f 0xbe. */
10984FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
10985{
10986 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
10987 IEMOP_HLP_MIN_386();
10988
10989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10990
10991 /*
10992 * If rm is denoting a register, no more instruction bytes.
10993 */
10994 if (IEM_IS_MODRM_REG_MODE(bRm))
10995 {
10996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10997 switch (pVCpu->iem.s.enmEffOpSize)
10998 {
10999 case IEMMODE_16BIT:
11000 IEM_MC_BEGIN(0, 1);
11001 IEM_MC_LOCAL(uint16_t, u16Value);
11002 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11003 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11004 IEM_MC_ADVANCE_RIP_AND_FINISH();
11005 IEM_MC_END();
11006 break;
11007
11008 case IEMMODE_32BIT:
11009 IEM_MC_BEGIN(0, 1);
11010 IEM_MC_LOCAL(uint32_t, u32Value);
11011 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11012 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11013 IEM_MC_ADVANCE_RIP_AND_FINISH();
11014 IEM_MC_END();
11015 break;
11016
11017 case IEMMODE_64BIT:
11018 IEM_MC_BEGIN(0, 1);
11019 IEM_MC_LOCAL(uint64_t, u64Value);
11020 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11021 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11022 IEM_MC_ADVANCE_RIP_AND_FINISH();
11023 IEM_MC_END();
11024 break;
11025
11026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11027 }
11028 }
11029 else
11030 {
11031 /*
11032 * We're loading a register from memory.
11033 */
11034 switch (pVCpu->iem.s.enmEffOpSize)
11035 {
11036 case IEMMODE_16BIT:
11037 IEM_MC_BEGIN(0, 2);
11038 IEM_MC_LOCAL(uint16_t, u16Value);
11039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11042 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11043 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11044 IEM_MC_ADVANCE_RIP_AND_FINISH();
11045 IEM_MC_END();
11046 break;
11047
11048 case IEMMODE_32BIT:
11049 IEM_MC_BEGIN(0, 2);
11050 IEM_MC_LOCAL(uint32_t, u32Value);
11051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11054 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11055 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11056 IEM_MC_ADVANCE_RIP_AND_FINISH();
11057 IEM_MC_END();
11058 break;
11059
11060 case IEMMODE_64BIT:
11061 IEM_MC_BEGIN(0, 2);
11062 IEM_MC_LOCAL(uint64_t, u64Value);
11063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11066 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11067 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11068 IEM_MC_ADVANCE_RIP_AND_FINISH();
11069 IEM_MC_END();
11070 break;
11071
11072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11073 }
11074 }
11075}
11076
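/*
 * Illustrative sketch (not compiled): the sign extension performed by the
 * movsx paths above, e.g. a 0x80 byte becomes 0xffffff80 in a dword register.
 * The helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
static uint32_t sketchMovsxGvEb(uint8_t bSrc)
{
    return (uint32_t)(int32_t)(int8_t)bSrc; /* sign bit 7 replicated upwards */
}
#endif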
11077
11078/** Opcode 0x0f 0xbf. */
11079FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11080{
11081 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11082 IEMOP_HLP_MIN_386();
11083
11084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11085
11086 /** @todo Not entirely sure how the operand size prefix is handled here,
11087 * assuming that it will be ignored. Would be nice to have a few
11088 * tests for this. */
11089 /*
11090 * If rm is denoting a register, no more instruction bytes.
11091 */
11092 if (IEM_IS_MODRM_REG_MODE(bRm))
11093 {
11094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11095 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11096 {
11097 IEM_MC_BEGIN(0, 1);
11098 IEM_MC_LOCAL(uint32_t, u32Value);
11099 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11100 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11101 IEM_MC_ADVANCE_RIP_AND_FINISH();
11102 IEM_MC_END();
11103 }
11104 else
11105 {
11106 IEM_MC_BEGIN(0, 1);
11107 IEM_MC_LOCAL(uint64_t, u64Value);
11108 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11109 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11110 IEM_MC_ADVANCE_RIP_AND_FINISH();
11111 IEM_MC_END();
11112 }
11113 }
11114 else
11115 {
11116 /*
11117 * We're loading a register from memory.
11118 */
11119 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11120 {
11121 IEM_MC_BEGIN(0, 2);
11122 IEM_MC_LOCAL(uint32_t, u32Value);
11123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11126 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11127 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11128 IEM_MC_ADVANCE_RIP_AND_FINISH();
11129 IEM_MC_END();
11130 }
11131 else
11132 {
11133 IEM_MC_BEGIN(0, 2);
11134 IEM_MC_LOCAL(uint64_t, u64Value);
11135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11138 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11139 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11140 IEM_MC_ADVANCE_RIP_AND_FINISH();
11141 IEM_MC_END();
11142 }
11143 }
11144}
11145
11146
11147/** Opcode 0x0f 0xc0. */
11148FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11149{
11150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11151 IEMOP_HLP_MIN_486();
11152 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11153
11154 /*
11155 * If rm is denoting a register, no more instruction bytes.
11156 */
11157 if (IEM_IS_MODRM_REG_MODE(bRm))
11158 {
11159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11160
11161 IEM_MC_BEGIN(3, 0);
11162 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11163 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11165
11166 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11167 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11168 IEM_MC_REF_EFLAGS(pEFlags);
11169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11170
11171 IEM_MC_ADVANCE_RIP_AND_FINISH();
11172 IEM_MC_END();
11173 }
11174 else
11175 {
11176 /*
11177 * We're accessing memory.
11178 */
11179 IEM_MC_BEGIN(3, 3);
11180 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11181 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11182 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11183 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11185
11186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11187 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11188 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11189 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11190 IEM_MC_FETCH_EFLAGS(EFlags);
11191 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11192 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11193 else
11194 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11195
11196 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11197 IEM_MC_COMMIT_EFLAGS(EFlags);
11198 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11199 IEM_MC_ADVANCE_RIP_AND_FINISH();
11200 IEM_MC_END();
11201 }
11202}
11203
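/*
 * Illustrative sketch (not compiled): the exchange-and-add performed by xadd,
 * matching the u8RegCopy dance in the memory path above - the register operand
 * receives the old destination value after the sum has been committed.  The
 * helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
static void sketchXadd8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;      /* destination gets the sum */
    *puReg = uOldDst;               /* register gets the old destination */
}
#endif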
11204
11205/** Opcode 0x0f 0xc1. */
11206FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11207{
11208 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11209 IEMOP_HLP_MIN_486();
11210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11211
11212 /*
11213 * If rm is denoting a register, no more instruction bytes.
11214 */
11215 if (IEM_IS_MODRM_REG_MODE(bRm))
11216 {
11217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11218
11219 switch (pVCpu->iem.s.enmEffOpSize)
11220 {
11221 case IEMMODE_16BIT:
11222 IEM_MC_BEGIN(3, 0);
11223 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11224 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11225 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11226
11227 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11228 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11229 IEM_MC_REF_EFLAGS(pEFlags);
11230 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11231
11232 IEM_MC_ADVANCE_RIP_AND_FINISH();
11233 IEM_MC_END();
11234 break;
11235
11236 case IEMMODE_32BIT:
11237 IEM_MC_BEGIN(3, 0);
11238 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11239 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11240 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11241
11242 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11243 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11244 IEM_MC_REF_EFLAGS(pEFlags);
11245 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11246
11247 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11248 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11249 IEM_MC_ADVANCE_RIP_AND_FINISH();
11250 IEM_MC_END();
11251 break;
11252
11253 case IEMMODE_64BIT:
11254 IEM_MC_BEGIN(3, 0);
11255 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11256 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11257 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11258
11259 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11260 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11261 IEM_MC_REF_EFLAGS(pEFlags);
11262 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11263
11264 IEM_MC_ADVANCE_RIP_AND_FINISH();
11265 IEM_MC_END();
11266 break;
11267
11268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11269 }
11270 }
11271 else
11272 {
11273 /*
11274 * We're accessing memory.
11275 */
11276 switch (pVCpu->iem.s.enmEffOpSize)
11277 {
11278 case IEMMODE_16BIT:
11279 IEM_MC_BEGIN(3, 3);
11280 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11281 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11282 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11283 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11285
11286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11287 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11288 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11289 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11290 IEM_MC_FETCH_EFLAGS(EFlags);
11291 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11292 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11293 else
11294 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11295
11296 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11297 IEM_MC_COMMIT_EFLAGS(EFlags);
11298 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11299 IEM_MC_ADVANCE_RIP_AND_FINISH();
11300 IEM_MC_END();
11301 break;
11302
11303 case IEMMODE_32BIT:
11304 IEM_MC_BEGIN(3, 3);
11305 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11306 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11307 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11308 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11310
11311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11312 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11313 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11314 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11315 IEM_MC_FETCH_EFLAGS(EFlags);
11316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11317 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11318 else
11319 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11320
11321 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11322 IEM_MC_COMMIT_EFLAGS(EFlags);
11323 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11324 IEM_MC_ADVANCE_RIP_AND_FINISH();
11325 IEM_MC_END();
11326 break;
11327
11328 case IEMMODE_64BIT:
11329 IEM_MC_BEGIN(3, 3);
11330 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11331 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11332 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11333 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11335
11336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11337 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11338 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11339 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11340 IEM_MC_FETCH_EFLAGS(EFlags);
11341 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11342 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11343 else
11344 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11345
11346 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11347 IEM_MC_COMMIT_EFLAGS(EFlags);
11348 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11349 IEM_MC_ADVANCE_RIP_AND_FINISH();
11350 IEM_MC_END();
11351 break;
11352
11353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11354 }
11355 }
11356}
11357
11358
11359/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11360FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11361{
11362 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11363
11364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11365 if (IEM_IS_MODRM_REG_MODE(bRm))
11366 {
11367 /*
11368 * XMM, XMM.
11369 */
11370 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11372 IEM_MC_BEGIN(4, 2);
11373 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11374 IEM_MC_LOCAL(X86XMMREG, Dst);
11375 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11376 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11377 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11378 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11379 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11380 IEM_MC_PREPARE_SSE_USAGE();
11381 IEM_MC_REF_MXCSR(pfMxcsr);
11382 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11383 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11384 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11385 IEM_MC_IF_MXCSR_XCPT_PENDING()
11386 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11387 IEM_MC_ELSE()
11388 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11389 IEM_MC_ENDIF();
11390
11391 IEM_MC_ADVANCE_RIP_AND_FINISH();
11392 IEM_MC_END();
11393 }
11394 else
11395 {
11396 /*
11397 * XMM, [mem128].
11398 */
11399 IEM_MC_BEGIN(4, 3);
11400 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11401 IEM_MC_LOCAL(X86XMMREG, Dst);
11402 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11403 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11404 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11406
11407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11408 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11409 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11411 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11412 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11413
11414 IEM_MC_PREPARE_SSE_USAGE();
11415 IEM_MC_REF_MXCSR(pfMxcsr);
11416 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11417 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11418 IEM_MC_IF_MXCSR_XCPT_PENDING()
11419 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11420 IEM_MC_ELSE()
11421 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11422 IEM_MC_ENDIF();
11423
11424 IEM_MC_ADVANCE_RIP_AND_FINISH();
11425 IEM_MC_END();
11426 }
11427}
11428
11429
11430/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11431FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11432{
11433 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11434
11435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11436 if (IEM_IS_MODRM_REG_MODE(bRm))
11437 {
11438 /*
11439 * XMM, XMM.
11440 */
11441 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11443 IEM_MC_BEGIN(4, 2);
11444 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11445 IEM_MC_LOCAL(X86XMMREG, Dst);
11446 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11447 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11448 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11449 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11450 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11451 IEM_MC_PREPARE_SSE_USAGE();
11452 IEM_MC_REF_MXCSR(pfMxcsr);
11453 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11454 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11455 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11456 IEM_MC_IF_MXCSR_XCPT_PENDING()
11457 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11458 IEM_MC_ELSE()
11459 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11460 IEM_MC_ENDIF();
11461
11462 IEM_MC_ADVANCE_RIP_AND_FINISH();
11463 IEM_MC_END();
11464 }
11465 else
11466 {
11467 /*
11468 * XMM, [mem128].
11469 */
11470 IEM_MC_BEGIN(4, 3);
11471 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11472 IEM_MC_LOCAL(X86XMMREG, Dst);
11473 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11474 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11475 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11477
11478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11479 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11480 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11482 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11483 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11484
11485 IEM_MC_PREPARE_SSE_USAGE();
11486 IEM_MC_REF_MXCSR(pfMxcsr);
11487 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11489 IEM_MC_IF_MXCSR_XCPT_PENDING()
11490 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11491 IEM_MC_ELSE()
11492 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11493 IEM_MC_ENDIF();
11494
11495 IEM_MC_ADVANCE_RIP_AND_FINISH();
11496 IEM_MC_END();
11497 }
11498}
11499
11500
11501/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11502FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11503{
11504 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11505
11506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11507 if (IEM_IS_MODRM_REG_MODE(bRm))
11508 {
11509 /*
11510 * XMM32, XMM32.
11511 */
11512 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11514 IEM_MC_BEGIN(4, 2);
11515 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11516 IEM_MC_LOCAL(X86XMMREG, Dst);
11517 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11518 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11519 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11520 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11521 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11522 IEM_MC_PREPARE_SSE_USAGE();
11523 IEM_MC_REF_MXCSR(pfMxcsr);
11524 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11525 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11526 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11527 IEM_MC_IF_MXCSR_XCPT_PENDING()
11528 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11529 IEM_MC_ELSE()
11530 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11531 IEM_MC_ENDIF();
11532
11533 IEM_MC_ADVANCE_RIP_AND_FINISH();
11534 IEM_MC_END();
11535 }
11536 else
11537 {
11538 /*
11539 * XMM32, [mem32].
11540 */
11541 IEM_MC_BEGIN(4, 3);
11542 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11543 IEM_MC_LOCAL(X86XMMREG, Dst);
11544 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11545 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11546 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11548
11549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11550 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11551 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11553 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11554 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11555
11556 IEM_MC_PREPARE_SSE_USAGE();
11557 IEM_MC_REF_MXCSR(pfMxcsr);
11558 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11559 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11560 IEM_MC_IF_MXCSR_XCPT_PENDING()
11561 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11562 IEM_MC_ELSE()
11563 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11564 IEM_MC_ENDIF();
11565
11566 IEM_MC_ADVANCE_RIP_AND_FINISH();
11567 IEM_MC_END();
11568 }
11569}
11570
11571
11572/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11573FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11574{
11575 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11576
11577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11578 if (IEM_IS_MODRM_REG_MODE(bRm))
11579 {
11580 /*
11581 * XMM64, XMM64.
11582 */
11583 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11585 IEM_MC_BEGIN(4, 2);
11586 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11587 IEM_MC_LOCAL(X86XMMREG, Dst);
11588 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11589 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11590 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11591 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11592 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11593 IEM_MC_PREPARE_SSE_USAGE();
11594 IEM_MC_REF_MXCSR(pfMxcsr);
11595 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11596 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11597 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11598 IEM_MC_IF_MXCSR_XCPT_PENDING()
11599 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11600 IEM_MC_ELSE()
11601 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11602 IEM_MC_ENDIF();
11603
11604 IEM_MC_ADVANCE_RIP_AND_FINISH();
11605 IEM_MC_END();
11606 }
11607 else
11608 {
11609 /*
11610 * XMM64, [mem64].
11611 */
11612 IEM_MC_BEGIN(4, 3);
11613 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11614 IEM_MC_LOCAL(X86XMMREG, Dst);
11615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11617 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11619
11620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11621 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11622 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11624 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11625 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11626
11627 IEM_MC_PREPARE_SSE_USAGE();
11628 IEM_MC_REF_MXCSR(pfMxcsr);
11629 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11630 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11631 IEM_MC_IF_MXCSR_XCPT_PENDING()
11632 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11633 IEM_MC_ELSE()
11634 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11635 IEM_MC_ENDIF();
11636
11637 IEM_MC_ADVANCE_RIP_AND_FINISH();
11638 IEM_MC_END();
11639 }
11640}
11641
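/*
 * Illustrative sketch (not compiled): the eight SSE comparison predicates
 * selected by the imm8 of the cmpps/cmppd/cmpss/cmpsd handlers above.  Each
 * lane yields an all-ones or all-zeros mask; the "N" forms and UNORD are true
 * when either input is a NaN.  The helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
# include <math.h>
static uint32_t sketchCmpSingleLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    int fRes;
    switch (bImm & 7)
    {
        case 0:  fRes = r32Src1 == r32Src2; break;                  /* EQ    */
        case 1:  fRes = r32Src1 <  r32Src2; break;                  /* LT    */
        case 2:  fRes = r32Src1 <= r32Src2; break;                  /* LE    */
        case 3:  fRes = isnan(r32Src1) || isnan(r32Src2); break;    /* UNORD */
        case 4:  fRes = !(r32Src1 == r32Src2); break;               /* NEQ   */
        case 5:  fRes = !(r32Src1 <  r32Src2); break;               /* NLT   */
        case 6:  fRes = !(r32Src1 <= r32Src2); break;               /* NLE   */
        default: fRes = !isnan(r32Src1) && !isnan(r32Src2); break;  /* ORD   */
    }
    return fRes ? UINT32_MAX : 0;
}
#endif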
11642
11643/** Opcode 0x0f 0xc3. */
11644FNIEMOP_DEF(iemOp_movnti_My_Gy)
11645{
11646 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11647
11648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11649
11650 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11651 if (IEM_IS_MODRM_MEM_MODE(bRm))
11652 {
11653 switch (pVCpu->iem.s.enmEffOpSize)
11654 {
11655 case IEMMODE_32BIT:
11656 IEM_MC_BEGIN(0, 2);
11657 IEM_MC_LOCAL(uint32_t, u32Value);
11658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11659
11660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11662 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11663 return IEMOP_RAISE_INVALID_OPCODE();
11664
11665 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11666 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11667 IEM_MC_ADVANCE_RIP_AND_FINISH();
11668 IEM_MC_END();
11669 break;
11670
11671 case IEMMODE_64BIT:
11672 IEM_MC_BEGIN(0, 2);
11673 IEM_MC_LOCAL(uint64_t, u64Value);
11674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11675
11676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11678 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11679 return IEMOP_RAISE_INVALID_OPCODE();
11680
11681 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11682 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11683 IEM_MC_ADVANCE_RIP_AND_FINISH();
11684 IEM_MC_END();
11685 break;
11686
11687 case IEMMODE_16BIT:
11688 /** @todo check this form. */
11689 return IEMOP_RAISE_INVALID_OPCODE();
11690
11691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11692 }
11693 }
11694 else
11695 return IEMOP_RAISE_INVALID_OPCODE();
11696}
11697
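/*
 * Illustrative note (not compiled): architecturally movnti is just a store,
 * which is why the handler above can use plain IEM_MC_STORE_MEM_U32/U64; the
 * non-temporal part is only a cache hint.  The SSE2 intrinsic below is what
 * compilers typically turn into movnti.
 */
#if 0
# include <emmintrin.h>
static void sketchMovnti(int *pi32Dst, int i32Value)
{
    _mm_stream_si32(pi32Dst, i32Value); /* non-temporal hint, ordinary value */
}
#endif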
11698
11699/* Opcode 0x66 0x0f 0xc3 - invalid */
11700/* Opcode 0xf3 0x0f 0xc3 - invalid */
11701/* Opcode 0xf2 0x0f 0xc3 - invalid */
11702
11703
11704/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11705FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11706{
11707 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11709 if (IEM_IS_MODRM_REG_MODE(bRm))
11710 {
11711 /*
11712 * Register, register.
11713 */
11714 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11716 IEM_MC_BEGIN(3, 0);
11717 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11718 IEM_MC_ARG(uint16_t, u16Src, 1);
11719 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11720 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11721 IEM_MC_PREPARE_FPU_USAGE();
11722 IEM_MC_FPU_TO_MMX_MODE();
11723 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11724 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11725 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11726 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11727 IEM_MC_ADVANCE_RIP_AND_FINISH();
11728 IEM_MC_END();
11729 }
11730 else
11731 {
11732 /*
11733 * Register, memory.
11734 */
11735 IEM_MC_BEGIN(3, 1);
11736 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11737 IEM_MC_ARG(uint16_t, u16Src, 1);
11738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11739
11740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11741 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11742 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11744 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11745 IEM_MC_PREPARE_FPU_USAGE();
11746 IEM_MC_FPU_TO_MMX_MODE();
11747
11748 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11749 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11750 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11751 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11752 IEM_MC_ADVANCE_RIP_AND_FINISH();
11753 IEM_MC_END();
11754 }
11755}
11756
11757
11758/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11759FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11760{
11761 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11763 if (IEM_IS_MODRM_REG_MODE(bRm))
11764 {
11765 /*
11766 * Register, register.
11767 */
11768 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11770 IEM_MC_BEGIN(3, 0);
11771 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11772 IEM_MC_ARG(uint16_t, u16Src, 1);
11773 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11775 IEM_MC_PREPARE_SSE_USAGE();
11776 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11777 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11778 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11779 IEM_MC_ADVANCE_RIP_AND_FINISH();
11780 IEM_MC_END();
11781 }
11782 else
11783 {
11784 /*
11785 * Register, memory.
11786 */
11787 IEM_MC_BEGIN(3, 2);
11788 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11789 IEM_MC_ARG(uint16_t, u16Src, 1);
11790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11791
11792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11793 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11794 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11797 IEM_MC_PREPARE_SSE_USAGE();
11798
11799 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11800 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11801 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11802 IEM_MC_ADVANCE_RIP_AND_FINISH();
11803 IEM_MC_END();
11804 }
11805}
11806
11807
11808/* Opcode 0xf3 0x0f 0xc4 - invalid */
11809/* Opcode 0xf2 0x0f 0xc4 - invalid */
11810
11811
11812/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11813FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11814{
11815 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);*/ /** @todo */
11816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11817 if (IEM_IS_MODRM_REG_MODE(bRm))
11818 {
11819 /*
11820 * Greg32, MMX, imm8.
11821 */
11822 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11824 IEM_MC_BEGIN(3, 1);
11825 IEM_MC_LOCAL(uint16_t, u16Dst);
11826 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11827 IEM_MC_ARG(uint64_t, u64Src, 1);
11828 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11829 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11830 IEM_MC_PREPARE_FPU_USAGE();
11831 IEM_MC_FPU_TO_MMX_MODE();
11832 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
11833 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
11834 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11835 IEM_MC_ADVANCE_RIP_AND_FINISH();
11836 IEM_MC_END();
11837 }
11838 /* No memory operand. */
11839 else
11840 return IEMOP_RAISE_INVALID_OPCODE();
11841}
11842
11843
11844/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11845FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11846{
11847 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11849 if (IEM_IS_MODRM_REG_MODE(bRm))
11850 {
11851 /*
11852 * Greg32, XMM, imm8.
11853 */
11854 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11856 IEM_MC_BEGIN(3, 1);
11857 IEM_MC_LOCAL(uint16_t, u16Dst);
11858 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11859 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11860 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11861 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11862 IEM_MC_PREPARE_SSE_USAGE();
11863 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11864 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
11865 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11866 IEM_MC_ADVANCE_RIP_AND_FINISH();
11867 IEM_MC_END();
11868 }
11869 /* No memory operand. */
11870 else
11871 return IEMOP_RAISE_INVALID_OPCODE();
11872}
11873
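/*
 * Illustrative sketch (not compiled): the word-lane selection done by the
 * pinsrw/pextrw handlers above - the imm8 is masked to the lane count (3 for
 * MMX, 7 for XMM) and pextrw zero-extends the word into the 32-bit register.
 * The helper names are made up for illustration.
 */
#if 0
# include <stdint.h>
static void sketchPinsrwU128(uint16_t au16Dst[8], uint32_t uSrc, uint8_t bImm)
{
    au16Dst[bImm & 7] = (uint16_t)uSrc;     /* only the selected lane changes */
}

static uint32_t sketchPextrwU128(uint16_t const au16Src[8], uint8_t bImm)
{
    return au16Src[bImm & 7];               /* zero-extended to 32 bits */
}
#endif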
11874
11875/* Opcode 0xf3 0x0f 0xc5 - invalid */
11876/* Opcode 0xf2 0x0f 0xc5 - invalid */
11877
11878
11879/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11880FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11881{
11882 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11884 if (IEM_IS_MODRM_REG_MODE(bRm))
11885 {
11886 /*
11887 * XMM, XMM, imm8.
11888 */
11889 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11891 IEM_MC_BEGIN(3, 0);
11892 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11893 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11894 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11895 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11896 IEM_MC_PREPARE_SSE_USAGE();
11897 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11898 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11899 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11900 IEM_MC_ADVANCE_RIP_AND_FINISH();
11901 IEM_MC_END();
11902 }
11903 else
11904 {
11905 /*
11906 * XMM, [mem128], imm8.
11907 */
11908 IEM_MC_BEGIN(3, 2);
11909 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11910 IEM_MC_LOCAL(RTUINT128U, uSrc);
11911 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11913
11914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11915 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11916 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11918 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11919 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11920
11921 IEM_MC_PREPARE_SSE_USAGE();
11922 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11923 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11924
11925 IEM_MC_ADVANCE_RIP_AND_FINISH();
11926 IEM_MC_END();
11927 }
11928}
11929
11930
11931/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11932FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11933{
11934 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11936 if (IEM_IS_MODRM_REG_MODE(bRm))
11937 {
11938 /*
11939 * XMM, XMM, imm8.
11940 */
11941 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11943 IEM_MC_BEGIN(3, 0);
11944 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11945 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11946 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11947 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11948 IEM_MC_PREPARE_SSE_USAGE();
11949 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11950 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11951 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
11952 IEM_MC_ADVANCE_RIP_AND_FINISH();
11953 IEM_MC_END();
11954 }
11955 else
11956 {
11957 /*
11958 * XMM, [mem128], imm8.
11959 */
11960 IEM_MC_BEGIN(3, 2);
11961 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11962 IEM_MC_LOCAL(RTUINT128U, uSrc);
11963 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11965
11966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11967 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11968 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11970 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11971 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11972
11973 IEM_MC_PREPARE_SSE_USAGE();
11974 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11975 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
11976
11977 IEM_MC_ADVANCE_RIP_AND_FINISH();
11978 IEM_MC_END();
11979 }
11980}
11981
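/*
 * Illustrative sketch (not compiled): the imm8 of shufps holds four 2-bit lane
 * selectors; the low two pick from the (old) destination, the high two from
 * the source, which is why an implementation must copy the destination before
 * overwriting it.  The helper name is made up for illustration.
 */
#if 0
# include <stdint.h>
static void sketchShufps(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const au32Old[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
    au32Dst[0] = au32Old[ bImm       & 3];
    au32Dst[1] = au32Old[(bImm >> 2) & 3];
    au32Dst[2] = au32Src[(bImm >> 4) & 3];
    au32Dst[3] = au32Src[(bImm >> 6) & 3];
}
#endif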
11982
11983/* Opcode 0xf3 0x0f 0xc6 - invalid */
11984/* Opcode 0xf2 0x0f 0xc6 - invalid */
11985
11986
11987/** Opcode 0x0f 0xc7 !11/1. */
11988FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
11989{
11990 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
11991
11992 IEM_MC_BEGIN(4, 3);
11993 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
11994 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
11995 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
11996 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11997 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
11998 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
11999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12000
12001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12002 IEMOP_HLP_DONE_DECODING();
12003 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12004
12005 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12006 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12007 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12008
12009 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12010 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12011 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12012
12013 IEM_MC_FETCH_EFLAGS(EFlags);
12014 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12015 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12016 else
12017 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12018
12019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12020 IEM_MC_COMMIT_EFLAGS(EFlags);
12021 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12022 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12023 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12024 IEM_MC_ENDIF();
12025 IEM_MC_ADVANCE_RIP_AND_FINISH();
12026
12027 IEM_MC_END();
12028}
12029
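/*
 * Illustrative sketch (not compiled): the compare-exchange performed by
 * cmpxchg8b, matching the handler above - on a match ZF is set and ECX:EBX is
 * stored, otherwise ZF is clear and EDX:EAX is reloaded from memory (hence the
 * conditional EAX/EDX write-back under IEM_MC_IF_EFL_BIT_NOT_SET).  The helper
 * name is made up; the real thing is atomic when locked.
 */
#if 0
# include <stdint.h>
static void sketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx,
                            uint64_t u64EbxEcx, uint32_t *pfZf)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;
        *pfZf = 1;
    }
    else
    {
        *pu64EaxEdx = *pu64Mem;
        *pfZf = 0;
    }
}
#endif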
12030
12031/** Opcode REX.W 0x0f 0xc7 !11/1. */
12032FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12033{
12034 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12035 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12036 {
12037#if 0
12038 RT_NOREF(bRm);
12039 IEMOP_BITCH_ABOUT_STUB();
12040 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
12041#else
12042 IEM_MC_BEGIN(4, 3);
12043 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12044 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12045 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12046 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12047 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12048 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12050
12051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12052 IEMOP_HLP_DONE_DECODING();
12053 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12054 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12055
12056 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12057 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12058 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12059
12060 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12061 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12062 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12063
12064 IEM_MC_FETCH_EFLAGS(EFlags);
12065# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
12066# if defined(RT_ARCH_AMD64)
12067 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12068# endif
12069 {
12070 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12071 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12072 else
12073 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12074 }
12075# if defined(RT_ARCH_AMD64)
12076 else
12077# endif
12078# endif
12079# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
12080 {
12081 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12082 accesses that are not at all atomic, which works fine in a uni-CPU guest
12083 configuration (ignoring DMA). If guest SMP is active we have no choice
12084 but to use a rendezvous callback here. Sigh. */
12085 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12086 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12087 else
12088 {
12089 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12090 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12091 }
12092 }
12093# endif
12094
12095 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12096 IEM_MC_COMMIT_EFLAGS(EFlags);
12097 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12098 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12099 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12100 IEM_MC_ENDIF();
12101 IEM_MC_ADVANCE_RIP_AND_FINISH();
12102
12103 IEM_MC_END();
12104#endif
12105 }
12106 Log(("cmpxchg16b -> #UD\n"));
12107 return IEMOP_RAISE_INVALID_OPCODE();
12108}
12109
12110FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12111{
12112 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12113 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12114 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12115}
12116
12117
12118/** Opcode 0x0f 0xc7 11/6. */
12119FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12120{
12121 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12122 return IEMOP_RAISE_INVALID_OPCODE();
12123
12124 if (IEM_IS_MODRM_REG_MODE(bRm))
12125 {
12126 /* register destination. */
12127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12128 switch (pVCpu->iem.s.enmEffOpSize)
12129 {
12130 case IEMMODE_16BIT:
12131 IEM_MC_BEGIN(2, 0);
12132 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12133 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12134
12135 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12136 IEM_MC_REF_EFLAGS(pEFlags);
12137 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12138 pu16Dst, pEFlags);
12139
12140 IEM_MC_ADVANCE_RIP_AND_FINISH();
12141 IEM_MC_END();
12142 break;
12143
12144 case IEMMODE_32BIT:
12145 IEM_MC_BEGIN(2, 0);
12146 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12147 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12148
12149 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12150 IEM_MC_REF_EFLAGS(pEFlags);
12151 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12152 pu32Dst, pEFlags);
12153
12154 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12155 IEM_MC_ADVANCE_RIP_AND_FINISH();
12156 IEM_MC_END();
12157 break;
12158
12159 case IEMMODE_64BIT:
12160 IEM_MC_BEGIN(2, 0);
12161 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12162 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12163
12164 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12165 IEM_MC_REF_EFLAGS(pEFlags);
12166 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12167 pu64Dst, pEFlags);
12168
12169 IEM_MC_ADVANCE_RIP_AND_FINISH();
12170 IEM_MC_END();
12171 break;
12172
12173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12174 }
12175 }
12176 /* Register only. */
12177 else
12178 return IEMOP_RAISE_INVALID_OPCODE();
12179}
12180
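/*
 * Illustrative sketch (not compiled): the rdrand success protocol - CF=1
 * signals a valid random value, CF=0 tells software to retry (Intel documents
 * the destination as zero in that case).  The rand() call is just a stand-in
 * for the hardware entropy source; the helper name is made up.
 */
#if 0
# include <stdint.h>
# include <stdlib.h>
static void sketchRdrand32(uint32_t *puDst, uint32_t *pfCf)
{
    *puDst = ((uint32_t)rand() << 16) ^ (uint32_t)rand(); /* stand-in entropy */
    *pfCf  = 1;                     /* 1 = valid value, 0 = caller should retry */
}
#endif
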
12181/** Opcode 0x0f 0xc7 !11/6. */
12182#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12183FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12184{
12185 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12186 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12187 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12188 IEM_MC_BEGIN(2, 0);
12189 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12190 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12192 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12193 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12194 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12195 IEM_MC_END();
12196 return VINF_SUCCESS;
12197}
12198#else
12199FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12200#endif
12201
12202/** Opcode 0x66 0x0f 0xc7 !11/6. */
12203#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12204FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12205{
12206 IEMOP_MNEMONIC(vmclear, "vmclear");
12207 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12208 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12209 IEM_MC_BEGIN(2, 0);
12210 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12211 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12213 IEMOP_HLP_DONE_DECODING();
12214 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12215 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12216 IEM_MC_END();
12217 return VINF_SUCCESS;
12218}
12219#else
12220FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12221#endif
12222
12223/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12224#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12225FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12226{
12227 IEMOP_MNEMONIC(vmxon, "vmxon");
12228 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12229 IEM_MC_BEGIN(2, 0);
12230 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12231 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12233 IEMOP_HLP_DONE_DECODING();
12234 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12235 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12236 IEM_MC_END();
12237 return VINF_SUCCESS;
12238}
12239#else
12240FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12241#endif
12242
12243/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12244#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12245FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12246{
12247 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12248 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12249 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12250 IEM_MC_BEGIN(2, 0);
12251 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12252 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12254 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12255 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12256 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12257 IEM_MC_END();
12258 return VINF_SUCCESS;
12259}
12260#else
12261FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12262#endif
12263
12264/** Opcode 0x0f 0xc7 11/7. */
12265FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12266{
12267 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12268 return IEMOP_RAISE_INVALID_OPCODE();
12269
12270 if (IEM_IS_MODRM_REG_MODE(bRm))
12271 {
12272 /* register destination. */
12273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
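/* RDSEED (like RDRAND above) reports success by setting CF and clearing
   OF/SF/ZF/AF/PF; the EFLAGS reference taken in each case below is what
   allows the host worker, or the software fallback selected by
   IEM_SELECT_HOST_OR_FALLBACK, to update those flags. */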
12274 switch (pVCpu->iem.s.enmEffOpSize)
12275 {
12276 case IEMMODE_16BIT:
12277 IEM_MC_BEGIN(2, 0);
12278 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12279 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12280
12281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12282 IEM_MC_REF_EFLAGS(pEFlags);
12283 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12284 pu16Dst, pEFlags);
12285
12286 IEM_MC_ADVANCE_RIP_AND_FINISH();
12287 IEM_MC_END();
12288 break;
12289
12290 case IEMMODE_32BIT:
12291 IEM_MC_BEGIN(2, 0);
12292 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12293 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12294
12295 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12296 IEM_MC_REF_EFLAGS(pEFlags);
12297 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12298 pu32Dst, pEFlags);
12299
12300 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12301 IEM_MC_ADVANCE_RIP_AND_FINISH();
12302 IEM_MC_END();
12303 break;
12304
12305 case IEMMODE_64BIT:
12306 IEM_MC_BEGIN(2, 0);
12307 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12308 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12309
12310 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12311 IEM_MC_REF_EFLAGS(pEFlags);
12312 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12313 pu64Dst, pEFlags);
12314
12315 IEM_MC_ADVANCE_RIP_AND_FINISH();
12316 IEM_MC_END();
12317 break;
12318
12319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12320 }
12321 }
12322 /* Register only; the memory encoding is invalid. */
12323 else
12324 return IEMOP_RAISE_INVALID_OPCODE();
12325}
12326
12327/**
12328 * Group 9 jump table for register variant.
12329 */
12330IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12331{ /* pfx: none, 066h, 0f3h, 0f2h */
12332 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12333 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12334 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12335 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12336 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12337 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12338 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12339 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12340};
12341AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12342
12343
12344/**
12345 * Group 9 jump table for memory variant.
12346 */
12347IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12348{ /* pfx: none, 066h, 0f3h, 0f2h */
12349 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12350 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12351 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12352 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12353 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12354 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12355 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12356 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12357};
12358AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
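/* Both group 9 tables are indexed by (modrm.reg << 2) + idxPrefix, with the
   prefix columns in the order commented above (none, 066h, 0f3h, 0f2h),
   assuming idxPrefix follows that same ordering.  Worked example: f3h 0fh
   0c7h /6 with a memory operand selects entry 6*4 + 2 = iemOp_Grp9_vmxon_Mq,
   while the register form of the same encoding hits an invalid entry.  See
   iemOp_Grp9 below. */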
12359
12360
12361/** Opcode 0x0f 0xc7. */
12362FNIEMOP_DEF(iemOp_Grp9)
12363{
12364 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12365 if (IEM_IS_MODRM_REG_MODE(bRm))
12366 /* register, register */
12367 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12368 + pVCpu->iem.s.idxPrefix], bRm);
12369 /* memory, register */
12370 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12371 + pVCpu->iem.s.idxPrefix], bRm);
12372}
12373
12374
12375/**
12376 * Common 'bswap register' helper.
12377 */
12378FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12379{
12380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12381 switch (pVCpu->iem.s.enmEffOpSize)
12382 {
12383 case IEMMODE_16BIT:
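/* The 16-bit operand size form of BSWAP is architecturally undefined;
   zeroing the low word is the commonly observed behaviour, and presumably
   what iemAImpl_bswap_u16 implements via the 32-bit register reference
   below. */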
12384 IEM_MC_BEGIN(1, 0);
12385 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12386 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12387 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12388 IEM_MC_ADVANCE_RIP_AND_FINISH();
12389 IEM_MC_END();
12390 break;
12391
12392 case IEMMODE_32BIT:
12393 IEM_MC_BEGIN(1, 0);
12394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12395 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12396 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12397 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12398 IEM_MC_ADVANCE_RIP_AND_FINISH();
12399 IEM_MC_END();
12400 break;
12401
12402 case IEMMODE_64BIT:
12403 IEM_MC_BEGIN(1, 0);
12404 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12405 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12406 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12407 IEM_MC_ADVANCE_RIP_AND_FINISH();
12408 IEM_MC_END();
12409 break;
12410
12411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12412 }
12413}
12414
12415
12416/** Opcode 0x0f 0xc8. */
12417FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12418{
12419 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12420 /* Note! The Intel manual states that R8-R15 can be accessed by using a
12421 REX.X prefix. REX.B appears to be the correct prefix, however. For a
12422 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12423 IEMOP_HLP_MIN_486();
12424 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12425}
12426
12427
12428/** Opcode 0x0f 0xc9. */
12429FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12430{
12431 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12432 IEMOP_HLP_MIN_486();
12433 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12434}
12435
12436
12437/** Opcode 0x0f 0xca. */
12438FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12439{
12440 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12441 IEMOP_HLP_MIN_486();
12442 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12443}
12444
12445
12446/** Opcode 0x0f 0xcb. */
12447FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12448{
12449 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12450 IEMOP_HLP_MIN_486();
12451 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12452}
12453
12454
12455/** Opcode 0x0f 0xcc. */
12456FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12457{
12458 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12459 IEMOP_HLP_MIN_486();
12460 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12461}
12462
12463
12464/** Opcode 0x0f 0xcd. */
12465FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12466{
12467 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12468 IEMOP_HLP_MIN_486();
12469 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12470}
12471
12472
12473/** Opcode 0x0f 0xce. */
12474FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12475{
12476 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12477 IEMOP_HLP_MIN_486();
12478 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12479}
12480
12481
12482/** Opcode 0x0f 0xcf. */
12483FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12484{
12485 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12486 IEMOP_HLP_MIN_486();
12487 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12488}
12489
12490
12491/* Opcode 0x0f 0xd0 - invalid */
12492
12493
12494/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12495FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12496{
12497 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12498 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12499}
12500
12501
12502/* Opcode 0xf3 0x0f 0xd0 - invalid */
12503
12504
12505/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12506FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12507{
12508 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12509 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12510}
12511
12512
12513
12514/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12515FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12516{
12517 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
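/* For these shift-by-register forms the entire 64-bit source operand is the
   shift count; the logical shifts zero the destination for counts above the
   element width, while the arithmetic PSRAW/PSRAD behave as if the count
   were clamped.  Those details are expected to live in the iemAImpl_*
   workers rather than here. */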
12518 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12519}
12520
12521/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12522FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12523{
12524 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12525 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12526}
12527
12528/* Opcode 0xf3 0x0f 0xd1 - invalid */
12529/* Opcode 0xf2 0x0f 0xd1 - invalid */
12530
12531/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12532FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12533{
12534 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12535 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12536}
12537
12538
12539/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12540FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12541{
12542 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12543 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12544}
12545
12546
12547/* Opcode 0xf3 0x0f 0xd2 - invalid */
12548/* Opcode 0xf2 0x0f 0xd2 - invalid */
12549
12550/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12551FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12552{
12553 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12554 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12555}
12556
12557
12558/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12559FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12560{
12561 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12562 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12563}
12564
12565
12566/* Opcode 0xf3 0x0f 0xd3 - invalid */
12567/* Opcode 0xf2 0x0f 0xd3 - invalid */
12568
12569
12570/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12571FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12572{
12573 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12574 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12575}
12576
12577
12578/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12579FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12580{
12581 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12582 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12583}
12584
12585
12586/* Opcode 0xf3 0x0f 0xd4 - invalid */
12587/* Opcode 0xf2 0x0f 0xd4 - invalid */
12588
12589/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12590FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12591{
12592 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12593 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12594}
12595
12596/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12597FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12598{
12599 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12600 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12601}
12602
12603
12604/* Opcode 0xf3 0x0f 0xd5 - invalid */
12605/* Opcode 0xf2 0x0f 0xd5 - invalid */
12606
12607/* Opcode 0x0f 0xd6 - invalid */
12608
12609/**
12610 * @opcode 0xd6
12611 * @oppfx 0x66
12612 * @opcpuid sse2
12613 * @opgroup og_sse2_pcksclr_datamove
12614 * @opxcpttype none
12615 * @optest op1=-1 op2=2 -> op1=2
12616 * @optest op1=0 op2=-42 -> op1=-42
12617 */
12618FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12619{
12620 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
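/* Note: the register form zero-extends the destination XMM register to
   128 bits (the WqZxReg operand above); the memory form stores only the
   low quadword. */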
12621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12622 if (IEM_IS_MODRM_REG_MODE(bRm))
12623 {
12624 /*
12625 * Register, register.
12626 */
12627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12628 IEM_MC_BEGIN(0, 2);
12629 IEM_MC_LOCAL(uint64_t, uSrc);
12630
12631 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12632 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12633
12634 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12635 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12636
12637 IEM_MC_ADVANCE_RIP_AND_FINISH();
12638 IEM_MC_END();
12639 }
12640 else
12641 {
12642 /*
12643 * Memory, register.
12644 */
12645 IEM_MC_BEGIN(0, 2);
12646 IEM_MC_LOCAL(uint64_t, uSrc);
12647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12648
12649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12653
12654 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12655 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12656
12657 IEM_MC_ADVANCE_RIP_AND_FINISH();
12658 IEM_MC_END();
12659 }
12660}
12661
12662
12663/**
12664 * @opcode 0xd6
12665 * @opcodesub 11 mr/reg
12666 * @oppfx f3
12667 * @opcpuid sse2
12668 * @opgroup og_sse2_simdint_datamove
12669 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12670 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12671 */
12672FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12673{
12674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12675 if (IEM_IS_MODRM_REG_MODE(bRm))
12676 {
12677 /*
12678 * Register, register.
12679 */
12680 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
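/* MOVQ2DQ copies an MMX register into the low quadword of an XMM register,
   zero extending the upper half; it also switches the FPU into MMX mode,
   which is why the tests above expect ftw=0xff. */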
12682 IEM_MC_BEGIN(0, 1);
12683 IEM_MC_LOCAL(uint64_t, uSrc);
12684
12685 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12686 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12687 IEM_MC_FPU_TO_MMX_MODE();
12688
12689 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12690 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12691
12692 IEM_MC_ADVANCE_RIP_AND_FINISH();
12693 IEM_MC_END();
12694 }
12695
12696 /**
12697 * @opdone
12698 * @opmnemonic udf30fd6mem
12699 * @opcode 0xd6
12700 * @opcodesub !11 mr/reg
12701 * @oppfx f3
12702 * @opunused intel-modrm
12703 * @opcpuid sse
12704 * @optest ->
12705 */
12706 else
12707 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12708}
12709
12710
12711/**
12712 * @opcode 0xd6
12713 * @opcodesub 11 mr/reg
12714 * @oppfx f2
12715 * @opcpuid sse2
12716 * @opgroup og_sse2_simdint_datamove
12717 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12718 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12719 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12720 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12721 * @optest op1=-42 op2=0xfedcba9876543210
12722 * -> op1=0xfedcba9876543210 ftw=0xff
12723 */
12724FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12725{
12726 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12727 if (IEM_IS_MODRM_REG_MODE(bRm))
12728 {
12729 /*
12730 * Register, register.
12731 */
12732 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12734 IEM_MC_BEGIN(0, 1);
12735 IEM_MC_LOCAL(uint64_t, uSrc);
12736
12737 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12738 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12739 IEM_MC_FPU_TO_MMX_MODE();
12740
12741 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12742 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12743
12744 IEM_MC_ADVANCE_RIP_AND_FINISH();
12745 IEM_MC_END();
12746 }
12747
12748 /**
12749 * @opdone
12750 * @opmnemonic udf20fd6mem
12751 * @opcode 0xd6
12752 * @opcodesub !11 mr/reg
12753 * @oppfx f2
12754 * @opunused intel-modrm
12755 * @opcpuid sse
12756 * @optest ->
12757 */
12758 else
12759 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12760}
12761
12762
12763/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12764FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12765{
12766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12767 /* Docs say register only. */
12768 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12769 {
12770 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12771 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
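/* PMOVMSKB gathers the most significant bit of each of the eight source
   bytes into the low bits of the destination GPR; taking a 64-bit reference
   to the GREG (the lazy approach noted above) leaves the zeroing of the
   upper bits to the worker. */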
12773 IEM_MC_BEGIN(2, 0);
12774 IEM_MC_ARG(uint64_t *, puDst, 0);
12775 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12776 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12777 IEM_MC_PREPARE_FPU_USAGE();
12778 IEM_MC_FPU_TO_MMX_MODE();
12779
12780 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12781 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12782 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12783
12784 IEM_MC_ADVANCE_RIP_AND_FINISH();
12785 IEM_MC_END();
12786 }
12787 else
12788 return IEMOP_RAISE_INVALID_OPCODE();
12789}
12790
12791
12792/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12793FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12794{
12795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12796 /* Docs say register only. */
12797 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12798 {
12799 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12800 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12802 IEM_MC_BEGIN(2, 0);
12803 IEM_MC_ARG(uint64_t *, puDst, 0);
12804 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12806 IEM_MC_PREPARE_SSE_USAGE();
12807 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12808 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12809 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12810 IEM_MC_ADVANCE_RIP_AND_FINISH();
12811 IEM_MC_END();
12812 }
12813 else
12814 return IEMOP_RAISE_INVALID_OPCODE();
12815}
12816
12817
12818/* Opcode 0xf3 0x0f 0xd7 - invalid */
12819/* Opcode 0xf2 0x0f 0xd7 - invalid */
12820
12821
12822/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12823FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12824{
12825 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12826 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12827}
12828
12829
12830/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12831FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12832{
12833 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12834 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12835}
12836
12837
12838/* Opcode 0xf3 0x0f 0xd8 - invalid */
12839/* Opcode 0xf2 0x0f 0xd8 - invalid */
12840
12841/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12842FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12843{
12844 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12845 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12846}
12847
12848
12849/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12850FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12851{
12852 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12853 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12854}
12855
12856
12857/* Opcode 0xf3 0x0f 0xd9 - invalid */
12858/* Opcode 0xf2 0x0f 0xd9 - invalid */
12859
12860/** Opcode 0x0f 0xda - pminub Pq, Qq */
12861FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12862{
12863 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12864 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12865}
12866
12867
12868/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12869FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12870{
12871 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12872 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12873}
12874
12875/* Opcode 0xf3 0x0f 0xda - invalid */
12876/* Opcode 0xf2 0x0f 0xda - invalid */
12877
12878/** Opcode 0x0f 0xdb - pand Pq, Qq */
12879FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12880{
12881 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12882 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12883}
12884
12885
12886/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12887FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12888{
12889 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12890 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12891}
12892
12893
12894/* Opcode 0xf3 0x0f 0xdb - invalid */
12895/* Opcode 0xf2 0x0f 0xdb - invalid */
12896
12897/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12898FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12899{
12900 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12901 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12902}
12903
12904
12905/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12906FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12907{
12908 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12909 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12910}
12911
12912
12913/* Opcode 0xf3 0x0f 0xdc - invalid */
12914/* Opcode 0xf2 0x0f 0xdc - invalid */
12915
12916/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12917FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12918{
12919 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12920 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12921}
12922
12923
12924/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12925FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12926{
12927 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12928 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12929}
12930
12931
12932/* Opcode 0xf3 0x0f 0xdd - invalid */
12933/* Opcode 0xf2 0x0f 0xdd - invalid */
12934
12935/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12936FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12937{
12938 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12939 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12940}
12941
12942
12943/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12944FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12945{
12946 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12947 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12948}
12949
12950/* Opcode 0xf3 0x0f 0xde - invalid */
12951/* Opcode 0xf2 0x0f 0xde - invalid */
12952
12953
12954/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12955FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12956{
12957 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12958 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12959}
12960
12961
12962/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12963FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12964{
12965 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12966 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12967}
12968
12969
12970/* Opcode 0xf3 0x0f 0xdf - invalid */
12971/* Opcode 0xf2 0x0f 0xdf - invalid */
12972
12973/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12974FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12975{
12976 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12977 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12978}
12979
12980
12981/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12982FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12983{
12984 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12985 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12986}
12987
12988
12989/* Opcode 0xf3 0x0f 0xe0 - invalid */
12990/* Opcode 0xf2 0x0f 0xe0 - invalid */
12991
12992/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12993FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12994{
12995 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12996 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12997}
12998
12999
13000/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13001FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13002{
13003 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13004 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13005}
13006
13007
13008/* Opcode 0xf3 0x0f 0xe1 - invalid */
13009/* Opcode 0xf2 0x0f 0xe1 - invalid */
13010
13011/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13012FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13013{
13014 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13015 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13016}
13017
13018
13019/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13020FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13021{
13022 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13023 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13024}
13025
13026
13027/* Opcode 0xf3 0x0f 0xe2 - invalid */
13028/* Opcode 0xf2 0x0f 0xe2 - invalid */
13029
13030/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13031FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13032{
13033 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13034 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13035}
13036
13037
13038/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13039FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13040{
13041 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13042 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13043}
13044
13045
13046/* Opcode 0xf3 0x0f 0xe3 - invalid */
13047/* Opcode 0xf2 0x0f 0xe3 - invalid */
13048
13049/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13050FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13051{
13052 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13053 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13054}
13055
13056
13057/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13058FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13059{
13060 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13061 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13062}
13063
13064
13065/* Opcode 0xf3 0x0f 0xe4 - invalid */
13066/* Opcode 0xf2 0x0f 0xe4 - invalid */
13067
13068/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13069FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13070{
13071 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13072 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13073}
13074
13075
13076/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13077FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13078{
13079 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13080 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13081}
13082
13083
13084/* Opcode 0xf3 0x0f 0xe5 - invalid */
13085/* Opcode 0xf2 0x0f 0xe5 - invalid */
13086/* Opcode 0x0f 0xe6 - invalid */
13087
13088
13089/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13090FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13091{
13092 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13093 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13094}
13095
13096
13097/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13098FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13099{
13100 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13101 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13102}
13103
13104
13105/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13106FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13107{
13108 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13109 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13110}
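
/* Note: within this 0xe6 trio, CVTTPD2DQ truncates toward zero, CVTPD2DQ
   rounds according to MXCSR.RC, and CVTDQ2PD widens int32 to double and is
   therefore always exact. */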
13111
13112
13113/**
13114 * @opcode 0xe7
13115 * @opcodesub !11 mr/reg
13116 * @oppfx none
13117 * @opcpuid sse
13118 * @opgroup og_sse1_cachect
13119 * @opxcpttype none
13120 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13121 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13122 */
13123FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13124{
13125 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
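/* The non-temporal hint carried by MOVNTQ has no architectural effect, so a
   plain 64-bit store (below) suffices for emulation purposes. */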
13126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13127 if (IEM_IS_MODRM_MEM_MODE(bRm))
13128 {
13129 /* Register, memory. */
13130 IEM_MC_BEGIN(0, 2);
13131 IEM_MC_LOCAL(uint64_t, uSrc);
13132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13133
13134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13137 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13138 IEM_MC_FPU_TO_MMX_MODE();
13139
13140 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13141 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13142
13143 IEM_MC_ADVANCE_RIP_AND_FINISH();
13144 IEM_MC_END();
13145 }
13146 /**
13147 * @opdone
13148 * @opmnemonic ud0fe7reg
13149 * @opcode 0xe7
13150 * @opcodesub 11 mr/reg
13151 * @oppfx none
13152 * @opunused immediate
13153 * @opcpuid sse
13154 * @optest ->
13155 */
13156 else
13157 return IEMOP_RAISE_INVALID_OPCODE();
13158}
13159
13160/**
13161 * @opcode 0xe7
13162 * @opcodesub !11 mr/reg
13163 * @oppfx 0x66
13164 * @opcpuid sse2
13165 * @opgroup og_sse2_cachect
13166 * @opxcpttype 1
13167 * @optest op1=-1 op2=2 -> op1=2
13168 * @optest op1=0 op2=-42 -> op1=-42
13169 */
13170FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13171{
13172 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
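/* As with MOVNTQ the non-temporal hint needs no emulation, but the 16 byte
   alignment requirement does - hence the _ALIGN_SSE store below, which
   presumably raises \#GP(0) for misaligned addresses. */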
13173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13174 if (IEM_IS_MODRM_MEM_MODE(bRm))
13175 {
13176 /* Register, memory. */
13177 IEM_MC_BEGIN(0, 2);
13178 IEM_MC_LOCAL(RTUINT128U, uSrc);
13179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13180
13181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13185
13186 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13187 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13188
13189 IEM_MC_ADVANCE_RIP_AND_FINISH();
13190 IEM_MC_END();
13191 }
13192
13193 /**
13194 * @opdone
13195 * @opmnemonic ud660fe7reg
13196 * @opcode 0xe7
13197 * @opcodesub 11 mr/reg
13198 * @oppfx 0x66
13199 * @opunused immediate
13200 * @opcpuid sse
13201 * @optest ->
13202 */
13203 else
13204 return IEMOP_RAISE_INVALID_OPCODE();
13205}
13206
13207/* Opcode 0xf3 0x0f 0xe7 - invalid */
13208/* Opcode 0xf2 0x0f 0xe7 - invalid */
13209
13210
13211/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13212FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13213{
13214 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13215 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13216}
13217
13218
13219/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13220FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13221{
13222 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13223 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13224}
13225
13226
13227/* Opcode 0xf3 0x0f 0xe8 - invalid */
13228/* Opcode 0xf2 0x0f 0xe8 - invalid */
13229
13230/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13231FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13232{
13233 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13234 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13235}
13236
13237
13238/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13239FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13240{
13241 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13242 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13243}
13244
13245
13246/* Opcode 0xf3 0x0f 0xe9 - invalid */
13247/* Opcode 0xf2 0x0f 0xe9 - invalid */
13248
13249
13250/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13251FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13252{
13253 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13254 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13255}
13256
13257
13258/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13259FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13260{
13261 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13262 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13263}
13264
13265
13266/* Opcode 0xf3 0x0f 0xea - invalid */
13267/* Opcode 0xf2 0x0f 0xea - invalid */
13268
13269
13270/** Opcode 0x0f 0xeb - por Pq, Qq */
13271FNIEMOP_DEF(iemOp_por_Pq_Qq)
13272{
13273 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13274 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13275}
13276
13277
13278/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13279FNIEMOP_DEF(iemOp_por_Vx_Wx)
13280{
13281 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13282 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13283}
13284
13285
13286/* Opcode 0xf3 0x0f 0xeb - invalid */
13287/* Opcode 0xf2 0x0f 0xeb - invalid */
13288
13289/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13290FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13291{
13292 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13293 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13294}
13295
13296
13297/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13298FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13299{
13300 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13301 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13302}
13303
13304
13305/* Opcode 0xf3 0x0f 0xec - invalid */
13306/* Opcode 0xf2 0x0f 0xec - invalid */
13307
13308/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13309FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13310{
13311 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13312 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13313}
13314
13315
13316/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13317FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13318{
13319 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13320 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13321}
13322
13323
13324/* Opcode 0xf3 0x0f 0xed - invalid */
13325/* Opcode 0xf2 0x0f 0xed - invalid */
13326
13327
13328/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13329FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13330{
13331 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13332 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13333}
13334
13335
13336/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13337FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13338{
13339 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13340 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13341}
13342
13343
13344/* Opcode 0xf3 0x0f 0xee - invalid */
13345/* Opcode 0xf2 0x0f 0xee - invalid */
13346
13347
13348/** Opcode 0x0f 0xef - pxor Pq, Qq */
13349FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13350{
13351 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13352 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13353}
13354
13355
13356/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13357FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13358{
13359 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13360 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13361}
13362
13363
13364/* Opcode 0xf3 0x0f 0xef - invalid */
13365/* Opcode 0xf2 0x0f 0xef - invalid */
13366
13367/* Opcode 0x0f 0xf0 - invalid */
13368/* Opcode 0x66 0x0f 0xf0 - invalid */
13369
13370
13371/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13372FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13373{
13374 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
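/* LDDQU (SSE3) is an unaligned 128-bit load; unlike MOVDQA it imposes no
   alignment restriction, which is why a plain (unaligned) U128 fetch is
   used below. */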
13375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13376 if (IEM_IS_MODRM_REG_MODE(bRm))
13377 {
13378 /*
13379 * Register, register - (not implemented, assuming it raises \#UD).
13380 */
13381 return IEMOP_RAISE_INVALID_OPCODE();
13382 }
13383 else
13384 {
13385 /*
13386 * Register, memory.
13387 */
13388 IEM_MC_BEGIN(0, 2);
13389 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13391
13392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13394 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13396 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13397 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13398
13399 IEM_MC_ADVANCE_RIP_AND_FINISH();
13400 IEM_MC_END();
13401 }
13402}
13403
13404
13405/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13406FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13407{
13408 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13409 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13410}
13411
13412
13413/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13414FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13415{
13416 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13417 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13418}
13419
13420
13421/* Opcode 0xf2 0x0f 0xf1 - invalid */
13422
13423/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13424FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13425{
13426 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13427 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13428}
13429
13430
13431/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13432FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13433{
13434 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13435 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13436}
13437
13438
13439/* Opcode 0xf2 0x0f 0xf2 - invalid */
13440
13441/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13442FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13443{
13444 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13445 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13446}
13447
13448
13449/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13450FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13451{
13452 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13453 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13454}
13455
13456/* Opcode 0xf2 0x0f 0xf3 - invalid */
13457
13458/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13459FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13460{
13461 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13462 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13463}
13464
13465
13466/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13467FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13468{
13469 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13470 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13471}
13472
13473
13474/* Opcode 0xf2 0x0f 0xf4 - invalid */
13475
13476/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13477FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13478{
13479 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13480 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13481}
13482
13483
13484/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13485FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13486{
13487 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13488 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13489}
13490
13491/* Opcode 0xf2 0x0f 0xf5 - invalid */
13492
13493/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13494FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13495{
13496 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13497 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13498}
13499
13500
13501/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13502FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13503{
13504 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13505 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13506}
13507
13508
13509/* Opcode 0xf2 0x0f 0xf6 - invalid */
13510
13511/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13512FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13513/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13514FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13515/* Opcode 0xf2 0x0f 0xf7 - invalid */
13516
13517
13518/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13519FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13520{
13521 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13522 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13523}
13524
13525
13526/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13527FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13528{
13529 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13530 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13531}
13532
13533
13534/* Opcode 0xf2 0x0f 0xf8 - invalid */
13535
13536
13537/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13538FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13539{
13540 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13541 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13542}
13543
13544
13545/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13546FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13547{
13548 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13549 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13550}
13551
13552
13553/* Opcode 0xf2 0x0f 0xf9 - invalid */
13554
13555
13556/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13557FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13558{
13559 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13560 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13561}
13562
13563
13564/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13565FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13566{
13567 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13568 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13569}
13570
13571
13572/* Opcode 0xf2 0x0f 0xfa - invalid */
13573
13574
13575/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13576FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13577{
13578 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13579 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13580}
13581
13582
13583/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13584FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13585{
13586 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13587 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13588}
13589
13590
13591/* Opcode 0xf2 0x0f 0xfb - invalid */
13592
13593
13594/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13595FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13596{
13597 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13598 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13599}
13600
13601
13602/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13603FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13604{
13605 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13606 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13607}
13608
13609
13610/* Opcode 0xf2 0x0f 0xfc - invalid */
13611
13612
13613/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13614FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13615{
13616 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13617 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13618}
13619
13620
13621/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13622FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13623{
13624 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13625 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13626}
13627
13628
13629/* Opcode 0xf2 0x0f 0xfd - invalid */
13630
13631
13632/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13633FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13634{
13635 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13636 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13637}
13638
13639
13640/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13641FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13642{
13643 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13644 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13645}
13646
13647
13648/* Opcode 0xf2 0x0f 0xfe - invalid */
13649
13650
13651/** Opcode 0x0f 0xff - UD0 */
13652FNIEMOP_DEF(iemOp_ud0)
13653{
13654 IEMOP_MNEMONIC(ud0, "ud0");
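/* Intel CPUs consume a ModR/M byte (and any addressing bytes it implies)
   for UD0 before raising \#UD, which is what the effective address
   calculation below models; for other vendors no further bytes are
   fetched. */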
13655 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13656 {
13657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13658#ifndef TST_IEM_CHECK_MC
13659 if (IEM_IS_MODRM_MEM_MODE(bRm))
13660 {
13661 RTGCPTR GCPtrEff;
13662 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13663 if (rcStrict != VINF_SUCCESS)
13664 return rcStrict;
13665 }
13666#endif
13667 IEMOP_HLP_DONE_DECODING();
13668 }
13669 return IEMOP_RAISE_INVALID_OPCODE();
13670}
13671
13672
13673
13674/**
13675 * Two byte opcode map, first byte 0x0f.
13676 *
13677 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13678 * check if it needs updating as well when making changes.
13679 */
13680IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13681{
13682 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
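/* IEMOP_X4(a_Name) expands to four identical entries, one per prefix column. */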
13683 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13684 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13685 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13686 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13687 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13688 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13689 /* 0x06 */ IEMOP_X4(iemOp_clts),
13690 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13691 /* 0x08 */ IEMOP_X4(iemOp_invd),
13692 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13693 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13694 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13695 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13696 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13697 /* 0x0e */ IEMOP_X4(iemOp_femms),
13698 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13699
13700 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13701 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13702 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13703 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13704 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13705 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13706 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13707 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13708 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13709 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13710 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13711 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13712 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13713 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13714 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13715 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13716
13717 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13718 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13719 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13720 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13721 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13722 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13723 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13724 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13725 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13726 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13727 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13728 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13729 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13730 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13731 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13732 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13733
13734 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13735 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13736 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13737 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13738 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13739 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13740 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13741 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13742 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13743 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13744 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13745 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13746 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13747 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13748 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13749 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13750
13751 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13752 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13753 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13754 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13755 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13756 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13757 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13758 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13759 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13760 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13761 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13762 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13763 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13764 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13765 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13766 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13767
    /* 0x50 */ iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq,      iemOp_punpckhdq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq,           iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,           iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,            iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
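
/*
 * Illustrative sketch (not part of the upstream source): the map is laid out
 * with four function pointers per opcode byte, one per mandatory-prefix
 * column in the order none, 0x66, 0xF3, 0xF2, which is exactly what the
 * 1024-entry assertion above checks (256 opcodes x 4 columns).  This layout
 * lets the two-byte escape decoder select the prefix-specific form with a
 * single table lookup instead of branching per instruction.  A dispatcher
 * would presumably index the map roughly like this, assuming the decoder
 * state keeps the last SSE prefix seen as a column index (the names below
 * are assumptions for illustration only):
 *
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *     // Assumed column index: 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */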

/** @} */
