/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 98103 2023-01-17 14:15:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
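
/* Usage sketch: an opcode handler typically forwards to this worker together
 * with the matching assembly helper, along the lines of
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
 * (iemAImpl_pxor_u64 is used here only as an illustrative helper name). */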


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
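
/* The two variants differ only in the assumed helper signature; reconstructed
 * from the IEM_MC_CALL_MMX_AIMPL_2 vs. IEM_MC_CALL_VOID_AIMPL_2 calls above,
 * they look roughly like:
 *     void helper(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc);  // PFNIEMAIMPLMEDIAF2U64
 *     void helper(uint64_t *puDst, uint64_t const *puSrc);                          // PFNIEMAIMPLMEDIAOPTF2U64
 * See the IEM headers for the authoritative typedefs. */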


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxx mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
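
/* Note: the "proper alignment" promise above is kept by
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE -- a [mem128] operand that is not 16-byte
 * aligned faults (#GP(0)) instead of being fetched. */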


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
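
/* Example of the shape this worker handles: punpcklbw mm1, mm2/mem32 only
 * needs the low half of the source, so the memory form fetches just 32 bits
 * and zero-extends them (IEM_MC_FETCH_MEM_U32_ZX_U64 above). */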


/**
 * Common worker for SSE instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 bits or the full 128 bits
 * may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 bits or the full 128 bits
 * may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *    pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
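
/* Example of the shape this worker handles: punpckhbw mm1, mm2/mem64 combines
 * the high halves, and as noted above the memory form still performs a full
 * 64-bit read. */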


/**
 * Common worker for SSE instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *    pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
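
/* Usage sketch (illustrative helper name): packed single-precision arithmetic
 * such as addps would be wired up as
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 * with the IEMSSERESULT local carrying the result value plus the MXCSR flags
 * produced by the helper. */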


/**
 * Common worker for SSE instructions of the form:
 *    pxxs xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
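
/* Scalar counterpart: instructions shaped like addss xmm1, xmm2/mem32 fit this
 * worker; note that the memory form fetches exactly 32 bits with no 16-byte
 * alignment check (IEM_MC_FETCH_MEM_R32 above). */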


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxs xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *    pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *    hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
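
/* Usage sketch (illustrative helper name): the SSE3 horizontal operations fit
 * here, e.g.
 *     return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 */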


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5 (common worker for verr/verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
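
/* Decode sketch: the reg field of the ModR/M byte selects the group member.
 * For the byte sequence 0f 00 d8, bRm = 0xd8, so IEM_GET_MODRM_REG_8 yields
 * (0xd8 >> 3) & 7 = 3 and dispatch lands on iemOp_Grp6_ltr. */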


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 0xc2. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 0xc3. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 0xc4. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc8. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1768
1769
1770/**
1771 * Group 7 jump table, memory variant.
1772 */
1773IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1774{
1775 iemOp_Grp7_sgdt,
1776 iemOp_Grp7_sidt,
1777 iemOp_Grp7_lgdt,
1778 iemOp_Grp7_lidt,
1779 iemOp_Grp7_smsw,
1780 iemOp_InvalidWithRM,
1781 iemOp_Grp7_lmsw,
1782 iemOp_Grp7_invlpg
1783};
1784
1785
1786/** Opcode 0x0f 0x01. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 return IEMOP_RAISE_INVALID_OPCODE();
1804
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 return IEMOP_RAISE_INVALID_OPCODE();
1812
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 return IEMOP_RAISE_INVALID_OPCODE();
1820
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 return IEMOP_RAISE_INVALID_OPCODE();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 return IEMOP_RAISE_INVALID_OPCODE();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
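/*
 * Decoding note: with mod=11b the 0f 01 group encodes fixed instructions
 * directly from the reg:rm fields instead of taking a register operand.
 * A few illustrative encodings (sketch, derived from the switch above):
 *      0f 01 c1    vmcall      (reg=0, rm=1)
 *      0f 01 d0    xgetbv      (reg=2, rm=0)
 *      0f 01 f9    rdtscp      (reg=7, rm=1)
 * Memory forms dispatch through g_apfnGroup7Mem, where /5 is currently invalid.
 */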
1855
1856/** Common worker for LAR and LSL, i.e. opcodes 0x0f 0x02 and 0x0f 0x03. */
1857FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1858{
1859 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861
1862 if (IEM_IS_MODRM_REG_MODE(bRm))
1863 {
1864 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1865 switch (pVCpu->iem.s.enmEffOpSize)
1866 {
1867 case IEMMODE_16BIT:
1868 {
1869 IEM_MC_BEGIN(3, 0);
1870 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1871 IEM_MC_ARG(uint16_t, u16Sel, 1);
1872 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1873
1874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1875 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1876 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1877
1878 IEM_MC_END();
1879 return VINF_SUCCESS;
1880 }
1881
1882 case IEMMODE_32BIT:
1883 case IEMMODE_64BIT:
1884 {
1885 IEM_MC_BEGIN(3, 0);
1886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1887 IEM_MC_ARG(uint16_t, u16Sel, 1);
1888 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1889
1890 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1891 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1892 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1893
1894 IEM_MC_END();
1895 return VINF_SUCCESS;
1896 }
1897
1898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1899 }
1900 }
1901 else
1902 {
1903 switch (pVCpu->iem.s.enmEffOpSize)
1904 {
1905 case IEMMODE_16BIT:
1906 {
1907 IEM_MC_BEGIN(3, 1);
1908 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1909 IEM_MC_ARG(uint16_t, u16Sel, 1);
1910 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1912
1913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1914 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1915
1916 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1917 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1918 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1919
1920 IEM_MC_END();
1921 return VINF_SUCCESS;
1922 }
1923
1924 case IEMMODE_32BIT:
1925 case IEMMODE_64BIT:
1926 {
1927 IEM_MC_BEGIN(3, 1);
1928 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1929 IEM_MC_ARG(uint16_t, u16Sel, 1);
1930 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1932
1933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1934 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1935/** @todo testcase: make sure it's a 16-bit read. */
1936
1937 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1938 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1939 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1940
1941 IEM_MC_END();
1942 return VINF_SUCCESS;
1943 }
1944
1945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1946 }
1947 }
1948}
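/*
 * Note: LAR loads the access-rights part of the descriptor selected by the
 * source selector, LSL the segment limit; on success ZF is set, otherwise ZF
 * is cleared and the destination is left unchanged (all in iemCImpl_LarLsl_*).
 * The source is always a 16-bit selector read, even for 32/64-bit operand
 * sizes, which is why those two sizes share the u64 worker above.
 */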
1949
1950
1951
1952/** Opcode 0x0f 0x02. */
1953FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1954{
1955 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1956 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1957}
1958
1959
1960/** Opcode 0x0f 0x03. */
1961FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1962{
1963 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1964 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1965}
1966
1967
1968/** Opcode 0x0f 0x05. */
1969FNIEMOP_DEF(iemOp_syscall)
1970{
1971 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1973 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1974}
1975
1976
1977/** Opcode 0x0f 0x06. */
1978FNIEMOP_DEF(iemOp_clts)
1979{
1980 IEMOP_MNEMONIC(clts, "clts");
1981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1982 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1983}
1984
1985
1986/** Opcode 0x0f 0x07. */
1987FNIEMOP_DEF(iemOp_sysret)
1988{
1989 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1991 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1992}
1993
1994
1995/** Opcode 0x0f 0x08. */
1996FNIEMOP_DEF(iemOp_invd)
1997{
1998 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1999 IEMOP_HLP_MIN_486();
2000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2001 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
2002}
2003
2004
2005/** Opcode 0x0f 0x09. */
2006FNIEMOP_DEF(iemOp_wbinvd)
2007{
2008 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2009 IEMOP_HLP_MIN_486();
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
2012}
2013
2014
2015/** Opcode 0x0f 0x0b. */
2016FNIEMOP_DEF(iemOp_ud2)
2017{
2018 IEMOP_MNEMONIC(ud2, "ud2");
2019 return IEMOP_RAISE_INVALID_OPCODE();
2020}
2021
2022/** Opcode 0x0f 0x0d. */
2023FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2024{
2025 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2026 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2027 {
2028 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2029 return IEMOP_RAISE_INVALID_OPCODE();
2030 }
2031
2032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2033 if (IEM_IS_MODRM_REG_MODE(bRm))
2034 {
2035 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2036 return IEMOP_RAISE_INVALID_OPCODE();
2037 }
2038
2039 switch (IEM_GET_MODRM_REG_8(bRm))
2040 {
2041 case 2: /* Aliased to /0 for the time being. */
2042 case 4: /* Aliased to /0 for the time being. */
2043 case 5: /* Aliased to /0 for the time being. */
2044 case 6: /* Aliased to /0 for the time being. */
2045 case 7: /* Aliased to /0 for the time being. */
2046 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2047 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2048 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2050 }
2051
2052 IEM_MC_BEGIN(0, 1);
2053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2056 /* Currently a NOP. */
2057 NOREF(GCPtrEffSrc);
2058 IEM_MC_ADVANCE_RIP_AND_FINISH();
2059 IEM_MC_END();
2060}
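/*
 * Note: 0f 0d is AMD's PREFETCH/PREFETCHW group and only decodes when the
 * CPU reports 3DNowPrefetch or long mode, matching the feature check above;
 * /1 and /3 are prefetchw, the remaining encodings currently alias prefetch.
 * Prefetches are advisory hints, so emulating them as NOPs is architecturally
 * fine.
 */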
2061
2062
2063/** Opcode 0x0f 0x0e. */
2064FNIEMOP_DEF(iemOp_femms)
2065{
2066 IEMOP_MNEMONIC(femms, "femms");
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068
2069 IEM_MC_BEGIN(0,0);
2070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2072 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2073 IEM_MC_FPU_FROM_MMX_MODE();
2074 IEM_MC_ADVANCE_RIP_AND_FINISH();
2075 IEM_MC_END();
2076}
2077
2078
2079/** Opcode 0x0f 0x0f. */
2080FNIEMOP_DEF(iemOp_3Dnow)
2081{
2082 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2083 {
2084 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2085 return IEMOP_RAISE_INVALID_OPCODE();
2086 }
2087
2088#ifdef IEM_WITH_3DNOW
2089 /* This is pretty sparse, use switch instead of table. */
2090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2092#else
2093 IEMOP_BITCH_ABOUT_STUB();
2094 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2095#endif
2096}
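/*
 * Note: 3DNow! instructions are encoded as 0f 0f /r with an imm8 suffix
 * *after* the operands selecting the actual operation, e.g. (sketch):
 *      0f 0f /r 9e     pfadd mm, mm/m64
 * hence the dispatcher above taking the trailing byte as its argument.
 */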
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
2108FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2109{
2110 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2112 if (IEM_IS_MODRM_REG_MODE(bRm))
2113 {
2114 /*
2115 * XMM128, XMM128.
2116 */
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 IEM_MC_BEGIN(0, 0);
2119 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2121 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2122 IEM_GET_MODRM_RM(pVCpu, bRm));
2123 IEM_MC_ADVANCE_RIP_AND_FINISH();
2124 IEM_MC_END();
2125 }
2126 else
2127 {
2128 /*
2129 * XMM128, [mem128].
2130 */
2131 IEM_MC_BEGIN(0, 2);
2132 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2137 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2139
2140 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2141 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2142
2143 IEM_MC_ADVANCE_RIP_AND_FINISH();
2144 IEM_MC_END();
2145 }
2146
2147}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
2159FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2160{
2161 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2163 if (IEM_IS_MODRM_REG_MODE(bRm))
2164 {
2165 /*
2166 * XMM128, XMM128.
2167 */
2168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2169 IEM_MC_BEGIN(0, 0);
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2173 IEM_GET_MODRM_RM(pVCpu, bRm));
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 else
2178 {
2179 /*
2180 * XMM128, [mem128].
2181 */
2182 IEM_MC_BEGIN(0, 2);
2183 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2185
2186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2188 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2189 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2190
2191 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2192 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP_AND_FINISH();
2195 IEM_MC_END();
2196 }
2197}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
2209FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * XMM32, XMM32.
2217 */
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_BEGIN(0, 1);
2220 IEM_MC_LOCAL(uint32_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2225 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2226
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * XMM128, [mem32].
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint32_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP_AND_FINISH();
2248 IEM_MC_END();
2249 }
2250}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
2262FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2263{
2264 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2266 if (IEM_IS_MODRM_REG_MODE(bRm))
2267 {
2268 /*
2269 * XMM64, XMM64.
2270 */
2271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2272 IEM_MC_BEGIN(0, 1);
2273 IEM_MC_LOCAL(uint64_t, uSrc);
2274
2275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2278 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2279
2280 IEM_MC_ADVANCE_RIP_AND_FINISH();
2281 IEM_MC_END();
2282 }
2283 else
2284 {
2285 /*
2286 * XMM128, [mem64].
2287 */
2288 IEM_MC_BEGIN(0, 2);
2289 IEM_MC_LOCAL(uint64_t, uSrc);
2290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2291
2292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2295 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2296
2297 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2298 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2299
2300 IEM_MC_ADVANCE_RIP_AND_FINISH();
2301 IEM_MC_END();
2302 }
2303}
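/*
 * Note on the 0f 10 family above: the prefix picks the form (none=movups,
 * 66=movupd, f3=movss, f2=movsd).  The full-width forms load 16 bytes with
 * no alignment restriction, while the scalar register-to-register forms
 * merge only the low element and the scalar memory forms zero-extend the
 * element to 128 bits - visible in the *_ZX_U128 stores above.
 */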
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
2315FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2316{
2317 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2319 if (IEM_IS_MODRM_REG_MODE(bRm))
2320 {
2321 /*
2322 * XMM128, XMM128.
2323 */
2324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2325 IEM_MC_BEGIN(0, 0);
2326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2328 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2329 IEM_GET_MODRM_REG(pVCpu, bRm));
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 }
2333 else
2334 {
2335 /*
2336 * [mem128], XMM128.
2337 */
2338 IEM_MC_BEGIN(0, 2);
2339 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2346
2347 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2348 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * XMM128, XMM128.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 0);
2376 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2379 IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * [mem128], XMM128.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2396
2397 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2398 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP_AND_FINISH();
2401 IEM_MC_END();
2402 }
2403}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
2415FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2416{
2417 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if (IEM_IS_MODRM_REG_MODE(bRm))
2420 {
2421 /*
2422 * XMM32, XMM32.
2423 */
2424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2425 IEM_MC_BEGIN(0, 1);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427
2428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2431 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2432
2433 IEM_MC_ADVANCE_RIP_AND_FINISH();
2434 IEM_MC_END();
2435 }
2436 else
2437 {
2438 /*
2439 * [mem32], XMM32.
2440 */
2441 IEM_MC_BEGIN(0, 2);
2442 IEM_MC_LOCAL(uint32_t, uSrc);
2443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2444
2445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2449
2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2451 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2452
2453 IEM_MC_ADVANCE_RIP_AND_FINISH();
2454 IEM_MC_END();
2455 }
2456}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2469{
2470 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472 if (IEM_IS_MODRM_REG_MODE(bRm))
2473 {
2474 /*
2475 * XMM64, XMM64.
2476 */
2477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2478 IEM_MC_BEGIN(0, 1);
2479 IEM_MC_LOCAL(uint64_t, uSrc);
2480
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2484 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2485
2486 IEM_MC_ADVANCE_RIP_AND_FINISH();
2487 IEM_MC_END();
2488 }
2489 else
2490 {
2491 /*
2492 * [mem64], XMM64.
2493 */
2494 IEM_MC_BEGIN(0, 2);
2495 IEM_MC_LOCAL(uint64_t, uSrc);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2502
2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2504 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509}
2510
2511
2512FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2513{
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if (IEM_IS_MODRM_REG_MODE(bRm))
2516 {
2517 /**
2518 * @opcode 0x12
2519 * @opcodesub 11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 */
2527 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2528
2529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2530 IEM_MC_BEGIN(0, 1);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532
2533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2536 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP_AND_FINISH();
2539 IEM_MC_END();
2540 }
2541 else
2542 {
2543 /**
2544 * @opdone
2545 * @opcode 0x12
2546 * @opcodesub !11 mr/reg
2547 * @oppfx none
2548 * @opcpuid sse
2549 * @opgroup og_sse_simdfp_datamove
2550 * @opxcpttype 5
2551 * @optest op1=1 op2=2 -> op1=2
2552 * @optest op1=0 op2=-42 -> op1=-42
 2553 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2554 */
2555 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2556
2557 IEM_MC_BEGIN(0, 2);
2558 IEM_MC_LOCAL(uint64_t, uSrc);
2559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2560
2561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2565
2566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2567 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2568
2569 IEM_MC_ADVANCE_RIP_AND_FINISH();
2570 IEM_MC_END();
2571 }
2572}
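/*
 * Semantics sketch for the register form above (MOVHLPS):
 *      puDst->au64[0] = puSrc->au64[1];    // low qword <- high qword
 *      // puDst->au64[1] is left untouched
 * The memory form (MOVLPS) instead loads the low qword from m64.
 */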
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
2585FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2586{
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_MEM_MODE(bRm))
2589 {
2590 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2591
2592 IEM_MC_BEGIN(0, 2);
2593 IEM_MC_LOCAL(uint64_t, uSrc);
2594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2595
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2600
2601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607
2608 /**
2609 * @opdone
2610 * @opmnemonic ud660f12m3
2611 * @opcode 0x12
2612 * @opcodesub 11 mr/reg
2613 * @oppfx 0x66
2614 * @opunused immediate
2615 * @opcpuid sse
2616 * @optest ->
2617 */
2618 else
2619 return IEMOP_RAISE_INVALID_OPCODE();
2620}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
2632FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2633{
2634 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2636 if (IEM_IS_MODRM_REG_MODE(bRm))
2637 {
2638 /*
2639 * XMM, XMM.
2640 */
2641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2642 IEM_MC_BEGIN(0, 1);
2643 IEM_MC_LOCAL(RTUINT128U, uSrc);
2644
2645 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2646 IEM_MC_PREPARE_SSE_USAGE();
2647
2648 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2649 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2653
2654 IEM_MC_ADVANCE_RIP_AND_FINISH();
2655 IEM_MC_END();
2656 }
2657 else
2658 {
2659 /*
2660 * XMM, [mem128].
2661 */
2662 IEM_MC_BEGIN(0, 2);
2663 IEM_MC_LOCAL(RTUINT128U, uSrc);
2664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2665
2666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2668 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2669 IEM_MC_PREPARE_SSE_USAGE();
2670
2671 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2672 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2673 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2674 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2675 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2676
2677 IEM_MC_ADVANCE_RIP_AND_FINISH();
2678 IEM_MC_END();
2679 }
2680}
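/*
 * Semantics sketch for MOVSLDUP above: the even dwords are duplicated,
 *      dst.au32[0] = dst.au32[1] = src.au32[0];
 *      dst.au32[2] = dst.au32[3] = src.au32[2];
 * The memory form uses the 16-byte aligned fetch, matching the alignment
 * fault behaviour of a full 128-bit SSE load.
 */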
2681
2682
2683/**
2684 * @opcode 0x12
2685 * @oppfx 0xf2
2686 * @opcpuid sse3
2687 * @opgroup og_sse3_pcksclr_datamove
2688 * @opxcpttype 5
2689 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2690 * op1=0x22222222111111112222222211111111
2691 */
2692FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2693{
2694 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2696 if (IEM_IS_MODRM_REG_MODE(bRm))
2697 {
2698 /*
2699 * XMM128, XMM64.
2700 */
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_BEGIN(1, 0);
2703 IEM_MC_ARG(uint64_t, uSrc, 0);
2704
2705 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2706 IEM_MC_PREPARE_SSE_USAGE();
2707
2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2709 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2710 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2711
2712 IEM_MC_ADVANCE_RIP_AND_FINISH();
2713 IEM_MC_END();
2714 }
2715 else
2716 {
2717 /*
2718 * XMM128, [mem64].
2719 */
2720 IEM_MC_BEGIN(1, 1);
2721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2722 IEM_MC_ARG(uint64_t, uSrc, 0);
2723
2724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2726 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2727 IEM_MC_PREPARE_SSE_USAGE();
2728
2729 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2730 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2731 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2732
2733 IEM_MC_ADVANCE_RIP_AND_FINISH();
2734 IEM_MC_END();
2735 }
2736}
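/*
 * Semantics sketch for MOVDDUP above:
 *      dst.au64[0] = dst.au64[1] = src.au64[0];
 * Only 8 bytes are read from memory, so unlike movsldup/movshdup there is
 * no 16-byte alignment requirement on the memory form.
 */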
2737
2738
2739/**
2740 * @opcode 0x13
2741 * @opcodesub !11 mr/reg
2742 * @oppfx none
2743 * @opcpuid sse
2744 * @opgroup og_sse_simdfp_datamove
2745 * @opxcpttype 5
2746 * @optest op1=1 op2=2 -> op1=2
2747 * @optest op1=0 op2=-42 -> op1=-42
2748 */
2749FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2750{
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2752 if (IEM_IS_MODRM_MEM_MODE(bRm))
2753 {
2754 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2755
2756 IEM_MC_BEGIN(0, 2);
2757 IEM_MC_LOCAL(uint64_t, uSrc);
2758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2759
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2764
2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2767
2768 IEM_MC_ADVANCE_RIP_AND_FINISH();
2769 IEM_MC_END();
2770 }
2771
2772 /**
2773 * @opdone
2774 * @opmnemonic ud0f13m3
2775 * @opcode 0x13
2776 * @opcodesub 11 mr/reg
2777 * @oppfx none
2778 * @opunused immediate
2779 * @opcpuid sse
2780 * @optest ->
2781 */
2782 else
2783 return IEMOP_RAISE_INVALID_OPCODE();
2784}
2785
2786
2787/**
2788 * @opcode 0x13
2789 * @opcodesub !11 mr/reg
2790 * @oppfx 0x66
2791 * @opcpuid sse2
2792 * @opgroup og_sse2_pcksclr_datamove
2793 * @opxcpttype 5
2794 * @optest op1=1 op2=2 -> op1=2
2795 * @optest op1=0 op2=-42 -> op1=-42
2796 */
2797FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2798{
2799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2800 if (IEM_IS_MODRM_MEM_MODE(bRm))
2801 {
2802 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2803 IEM_MC_BEGIN(0, 2);
2804 IEM_MC_LOCAL(uint64_t, uSrc);
2805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2806
2807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2811
2812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2814
2815 IEM_MC_ADVANCE_RIP_AND_FINISH();
2816 IEM_MC_END();
2817 }
2818
2819 /**
2820 * @opdone
2821 * @opmnemonic ud660f13m3
2822 * @opcode 0x13
2823 * @opcodesub 11 mr/reg
2824 * @oppfx 0x66
2825 * @opunused immediate
2826 * @opcpuid sse
2827 * @optest ->
2828 */
2829 else
2830 return IEMOP_RAISE_INVALID_OPCODE();
2831}
2832
2833
2834/**
2835 * @opmnemonic udf30f13
2836 * @opcode 0x13
2837 * @oppfx 0xf3
2838 * @opunused intel-modrm
2839 * @opcpuid sse
2840 * @optest ->
2841 * @opdone
2842 */
2843
2844/**
2845 * @opmnemonic udf20f13
2846 * @opcode 0x13
2847 * @oppfx 0xf2
2848 * @opunused intel-modrm
2849 * @opcpuid sse
2850 * @optest ->
2851 * @opdone
2852 */
2853
2854/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2855FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2859}
2860
2861
2862/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2863FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2864{
2865 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2866 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2867}
2868
2869
2870/**
2871 * @opdone
2872 * @opmnemonic udf30f14
2873 * @opcode 0x14
2874 * @oppfx 0xf3
2875 * @opunused intel-modrm
2876 * @opcpuid sse
2877 * @optest ->
2878 * @opdone
2879 */
2880
2881/**
2882 * @opmnemonic udf20f14
2883 * @opcode 0x14
2884 * @oppfx 0xf2
2885 * @opunused intel-modrm
2886 * @opcpuid sse
2887 * @optest ->
2888 * @opdone
2889 */
2890
2891/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2892FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2893{
2894 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2895 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2896}
2897
2898
2899/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2900FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2901{
2902 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2903 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2904}
2905
2906
2907/* Opcode 0xf3 0x0f 0x15 - invalid */
2908/* Opcode 0xf2 0x0f 0x15 - invalid */
2909
2910/**
2911 * @opdone
2912 * @opmnemonic udf30f15
2913 * @opcode 0x15
2914 * @oppfx 0xf3
2915 * @opunused intel-modrm
2916 * @opcpuid sse
2917 * @optest ->
2918 * @opdone
2919 */
2920
2921/**
2922 * @opmnemonic udf20f15
2923 * @opcode 0x15
2924 * @oppfx 0xf2
2925 * @opunused intel-modrm
2926 * @opcpuid sse
2927 * @optest ->
2928 * @opdone
2929 */
2930
2931FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2932{
2933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2934 if (IEM_IS_MODRM_REG_MODE(bRm))
2935 {
2936 /**
2937 * @opcode 0x16
2938 * @opcodesub 11 mr/reg
2939 * @oppfx none
2940 * @opcpuid sse
2941 * @opgroup og_sse_simdfp_datamove
2942 * @opxcpttype 5
2943 * @optest op1=1 op2=2 -> op1=2
2944 * @optest op1=0 op2=-42 -> op1=-42
2945 */
2946 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2947
2948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2949 IEM_MC_BEGIN(0, 1);
2950 IEM_MC_LOCAL(uint64_t, uSrc);
2951
2952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2954 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2955 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2956
2957 IEM_MC_ADVANCE_RIP_AND_FINISH();
2958 IEM_MC_END();
2959 }
2960 else
2961 {
2962 /**
2963 * @opdone
2964 * @opcode 0x16
2965 * @opcodesub !11 mr/reg
2966 * @oppfx none
2967 * @opcpuid sse
2968 * @opgroup og_sse_simdfp_datamove
2969 * @opxcpttype 5
2970 * @optest op1=1 op2=2 -> op1=2
2971 * @optest op1=0 op2=-42 -> op1=-42
2972 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2973 */
2974 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2975
2976 IEM_MC_BEGIN(0, 2);
2977 IEM_MC_LOCAL(uint64_t, uSrc);
2978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2979
2980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2984
2985 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2986 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991}
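/*
 * Semantics sketch for the register form above (MOVLHPS):
 *      puDst->au64[1] = puSrc->au64[0];    // high qword <- low qword
 * The memory form (MOVHPS) loads the high qword from m64; both leave the
 * low qword of the destination untouched.
 */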
2992
2993
2994/**
2995 * @opcode 0x16
2996 * @opcodesub !11 mr/reg
2997 * @oppfx 0x66
2998 * @opcpuid sse2
2999 * @opgroup og_sse2_pcksclr_datamove
3000 * @opxcpttype 5
3001 * @optest op1=1 op2=2 -> op1=2
3002 * @optest op1=0 op2=-42 -> op1=-42
3003 */
3004FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 if (IEM_IS_MODRM_MEM_MODE(bRm))
3008 {
3009 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010 IEM_MC_BEGIN(0, 2);
3011 IEM_MC_LOCAL(uint64_t, uSrc);
3012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3013
3014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3016 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3017 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3018
3019 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3020 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3021
3022 IEM_MC_ADVANCE_RIP_AND_FINISH();
3023 IEM_MC_END();
3024 }
3025
3026 /**
3027 * @opdone
3028 * @opmnemonic ud660f16m3
3029 * @opcode 0x16
3030 * @opcodesub 11 mr/reg
3031 * @oppfx 0x66
3032 * @opunused immediate
3033 * @opcpuid sse
3034 * @optest ->
3035 */
3036 else
3037 return IEMOP_RAISE_INVALID_OPCODE();
3038}
3039
3040
3041/**
3042 * @opcode 0x16
3043 * @oppfx 0xf3
3044 * @opcpuid sse3
3045 * @opgroup og_sse3_pcksclr_datamove
3046 * @opxcpttype 4
3047 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3048 * op1=0x00000002000000020000000100000001
3049 */
3050FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3051{
3052 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3054 if (IEM_IS_MODRM_REG_MODE(bRm))
3055 {
3056 /*
3057 * XMM128, XMM128.
3058 */
3059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3060 IEM_MC_BEGIN(0, 1);
3061 IEM_MC_LOCAL(RTUINT128U, uSrc);
3062
3063 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3064 IEM_MC_PREPARE_SSE_USAGE();
3065
3066 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3067 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3068 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3069 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3070 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3071
3072 IEM_MC_ADVANCE_RIP_AND_FINISH();
3073 IEM_MC_END();
3074 }
3075 else
3076 {
3077 /*
3078 * XMM128, [mem128].
3079 */
3080 IEM_MC_BEGIN(0, 2);
3081 IEM_MC_LOCAL(RTUINT128U, uSrc);
3082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3083
3084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3087 IEM_MC_PREPARE_SSE_USAGE();
3088
3089 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3090 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3091 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3092 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3093 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3094
3095 IEM_MC_ADVANCE_RIP_AND_FINISH();
3096 IEM_MC_END();
3097 }
3098}
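/*
 * Semantics sketch for MOVSHDUP above, the odd-dword sibling of movsldup:
 *      dst.au32[0] = dst.au32[1] = src.au32[1];
 *      dst.au32[2] = dst.au32[3] = src.au32[3];
 */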
3099
3100/**
3101 * @opdone
 3102 * @opmnemonic udf20f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/**
3113 * @opcode 0x17
3114 * @opcodesub !11 mr/reg
3115 * @oppfx none
3116 * @opcpuid sse
3117 * @opgroup og_sse_simdfp_datamove
3118 * @opxcpttype 5
3119 * @optest op1=1 op2=2 -> op1=2
3120 * @optest op1=0 op2=-42 -> op1=-42
3121 */
3122FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_MEM_MODE(bRm))
3126 {
3127 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3128
3129 IEM_MC_BEGIN(0, 2);
3130 IEM_MC_LOCAL(uint64_t, uSrc);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3136 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3137
3138 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144
3145 /**
3146 * @opdone
3147 * @opmnemonic ud0f17m3
3148 * @opcode 0x17
3149 * @opcodesub 11 mr/reg
3150 * @oppfx none
3151 * @opunused immediate
3152 * @opcpuid sse
3153 * @optest ->
3154 */
3155 else
3156 return IEMOP_RAISE_INVALID_OPCODE();
3157}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
3170FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 if (IEM_IS_MODRM_MEM_MODE(bRm))
3174 {
3175 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3176
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(uint64_t, uSrc);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3188
3189 IEM_MC_ADVANCE_RIP_AND_FINISH();
3190 IEM_MC_END();
3191 }
3192
3193 /**
3194 * @opdone
3195 * @opmnemonic ud660f17m3
3196 * @opcode 0x17
3197 * @opcodesub 11 mr/reg
3198 * @oppfx 0x66
3199 * @opunused immediate
3200 * @opcpuid sse
3201 * @optest ->
3202 */
3203 else
3204 return IEMOP_RAISE_INVALID_OPCODE();
3205}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
3230/** Opcode 0x0f 0x18. */
3231FNIEMOP_DEF(iemOp_prefetch_Grp16)
3232{
3233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3234 if (IEM_IS_MODRM_MEM_MODE(bRm))
3235 {
3236 switch (IEM_GET_MODRM_REG_8(bRm))
3237 {
3238 case 4: /* Aliased to /0 for the time being according to AMD. */
3239 case 5: /* Aliased to /0 for the time being according to AMD. */
3240 case 6: /* Aliased to /0 for the time being according to AMD. */
3241 case 7: /* Aliased to /0 for the time being according to AMD. */
3242 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3243 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3244 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3245 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3247 }
3248
3249 IEM_MC_BEGIN(0, 1);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3253 /* Currently a NOP. */
3254 NOREF(GCPtrEffSrc);
3255 IEM_MC_ADVANCE_RIP_AND_FINISH();
3256 IEM_MC_END();
3257 }
3258 else
3259 return IEMOP_RAISE_INVALID_OPCODE();
3260}
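/*
 * Note: 0f 18 /0../3 are prefetchnta/t0/t1/t2 (e.g. 0f 18 06 = prefetchnta
 * byte [esi] in 32-bit code); /4../7 alias /0 per AMD for now.  Prefetches
 * are pure hints, so treating them as NOPs is architecturally correct.
 */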
3261
3262
3263/** Opcode 0x0f 0x19..0x1f. */
3264FNIEMOP_DEF(iemOp_nop_Ev)
3265{
3266 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3268 if (IEM_IS_MODRM_REG_MODE(bRm))
3269 {
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_BEGIN(0, 0);
3272 IEM_MC_ADVANCE_RIP_AND_FINISH();
3273 IEM_MC_END();
3274 }
3275 else
3276 {
3277 IEM_MC_BEGIN(0, 1);
3278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 /* Currently a NOP. */
3282 NOREF(GCPtrEffSrc);
3283 IEM_MC_ADVANCE_RIP_AND_FINISH();
3284 IEM_MC_END();
3285 }
3286}
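/*
 * Note: 0f 19..0f 1f all decode as multi-byte NOP here; Intel documents
 * 0f 1f /0 as the recommended long NOP, e.g. 66 0f 1f 44 00 00 for the
 * six-byte form.  The memory variants still run the effective-address
 * calculation above, but no memory is actually touched.
 */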
3287
3288
3289/** Opcode 0x0f 0x20. */
3290FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3291{
 3292 /* mod is ignored, as are operand size overrides. */
3293 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3294 IEMOP_HLP_MIN_386();
3295 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3296 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3297 else
3298 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3299
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3302 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3303 {
3304 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3305 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3306 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3307 iCrReg |= 8;
3308 }
3309 switch (iCrReg)
3310 {
3311 case 0: case 2: case 3: case 4: case 8:
3312 break;
3313 default:
3314 return IEMOP_RAISE_INVALID_OPCODE();
3315 }
3316 IEMOP_HLP_DONE_DECODING();
3317
3318 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3319}
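/*
 * Note: on CPUs with the MovCr8In32Bit feature (some AMDs), a LOCK prefix
 * on 0f 20/0f 22 re-encodes the CR0 reference as CR8, e.g. (sketch):
 *      f0 0f 20 c0     mov eax, cr8
 * matching the iCrReg |= 8 handling above and in the 0f 22 decoder below.
 */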
3320
3321
3322/** Opcode 0x0f 0x21. */
3323FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3324{
3325 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3326 IEMOP_HLP_MIN_386();
3327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3329 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3330 return IEMOP_RAISE_INVALID_OPCODE();
3331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3332 IEM_GET_MODRM_RM(pVCpu, bRm),
3333 IEM_GET_MODRM_REG_8(bRm));
3334}
3335
3336
3337/** Opcode 0x0f 0x22. */
3338FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3339{
 3340 /* mod is ignored, as are operand size overrides. */
3341 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3342 IEMOP_HLP_MIN_386();
3343 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3344 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3345 else
3346 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3347
3348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3349 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3350 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3351 {
3352 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3353 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3354 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3355 iCrReg |= 8;
3356 }
3357 switch (iCrReg)
3358 {
3359 case 0: case 2: case 3: case 4: case 8:
3360 break;
3361 default:
3362 return IEMOP_RAISE_INVALID_OPCODE();
3363 }
3364 IEMOP_HLP_DONE_DECODING();
3365
3366 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3367}
3368
3369
3370/** Opcode 0x0f 0x23. */
3371FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3372{
3373 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3374 IEMOP_HLP_MIN_386();
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3377 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3378 return IEMOP_RAISE_INVALID_OPCODE();
3379 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3380 IEM_GET_MODRM_REG_8(bRm),
3381 IEM_GET_MODRM_RM(pVCpu, bRm));
3382}
3383
3384
3385/** Opcode 0x0f 0x24. */
3386FNIEMOP_DEF(iemOp_mov_Rd_Td)
3387{
3388 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3389 IEMOP_HLP_MIN_386();
3390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3392 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3393 return IEMOP_RAISE_INVALID_OPCODE();
3394 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3395 IEM_GET_MODRM_RM(pVCpu, bRm),
3396 IEM_GET_MODRM_REG_8(bRm));
3397}
3398
3399
3400/** Opcode 0x0f 0x26. */
3401FNIEMOP_DEF(iemOp_mov_Td_Rd)
3402{
3403 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3404 IEMOP_HLP_MIN_386();
3405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3407 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3408 return IEMOP_RAISE_INVALID_OPCODE();
3409 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3410 IEM_GET_MODRM_REG_8(bRm),
3411 IEM_GET_MODRM_RM(pVCpu, bRm));
3412}
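/*
 * Note: 0f 24 and 0f 26 move the 386/486 test registers (TR3..TR7); Pentium
 * and later dropped them, so targets >= Pentium raise #UD above and only the
 * older target CPUs ever reach the CIMPL.
 */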
3413
3414
3415/**
3416 * @opcode 0x28
3417 * @oppfx none
3418 * @opcpuid sse
3419 * @opgroup og_sse_simdfp_datamove
3420 * @opxcpttype 1
3421 * @optest op1=1 op2=2 -> op1=2
3422 * @optest op1=0 op2=-42 -> op1=-42
3423 */
3424FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3428 if (IEM_IS_MODRM_REG_MODE(bRm))
3429 {
3430 /*
3431 * Register, register.
3432 */
3433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3434 IEM_MC_BEGIN(0, 0);
3435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3437 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3438 IEM_GET_MODRM_RM(pVCpu, bRm));
3439 IEM_MC_ADVANCE_RIP_AND_FINISH();
3440 IEM_MC_END();
3441 }
3442 else
3443 {
3444 /*
3445 * Register, memory.
3446 */
3447 IEM_MC_BEGIN(0, 2);
3448 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3450
3451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3453 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3455
3456 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3457 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3458
3459 IEM_MC_ADVANCE_RIP_AND_FINISH();
3460 IEM_MC_END();
3461 }
3462}
3463
3464/**
3465 * @opcode 0x28
3466 * @oppfx 66
3467 * @opcpuid sse2
3468 * @opgroup og_sse2_pcksclr_datamove
3469 * @opxcpttype 1
3470 * @optest op1=1 op2=2 -> op1=2
3471 * @optest op1=0 op2=-42 -> op1=-42
3472 */
3473FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3474{
3475 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3477 if (IEM_IS_MODRM_REG_MODE(bRm))
3478 {
3479 /*
3480 * Register, register.
3481 */
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_BEGIN(0, 0);
3484 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3486 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3487 IEM_GET_MODRM_RM(pVCpu, bRm));
3488 IEM_MC_ADVANCE_RIP_AND_FINISH();
3489 IEM_MC_END();
3490 }
3491 else
3492 {
3493 /*
3494 * Register, memory.
3495 */
3496 IEM_MC_BEGIN(0, 2);
3497 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3499
3500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3504
3505 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3506 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3507
3508 IEM_MC_ADVANCE_RIP_AND_FINISH();
3509 IEM_MC_END();
3510 }
3511}
3512
3513/* Opcode 0xf3 0x0f 0x28 - invalid */
3514/* Opcode 0xf2 0x0f 0x28 - invalid */
3515
3516/**
3517 * @opcode 0x29
3518 * @oppfx none
3519 * @opcpuid sse
3520 * @opgroup og_sse_simdfp_datamove
3521 * @opxcpttype 1
3522 * @optest op1=1 op2=2 -> op1=2
3523 * @optest op1=0 op2=-42 -> op1=-42
3524 */
3525FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3526{
3527 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3529 if (IEM_IS_MODRM_REG_MODE(bRm))
3530 {
3531 /*
3532 * Register, register.
3533 */
3534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3535 IEM_MC_BEGIN(0, 0);
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3538 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3539 IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_ADVANCE_RIP_AND_FINISH();
3541 IEM_MC_END();
3542 }
3543 else
3544 {
3545 /*
3546 * Memory, register.
3547 */
3548 IEM_MC_BEGIN(0, 2);
3549 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3551
3552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3554 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3555 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3556
3557 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3558 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3559
3560 IEM_MC_ADVANCE_RIP_AND_FINISH();
3561 IEM_MC_END();
3562 }
3563}
3564
3565/**
3566 * @opcode 0x29
3567 * @oppfx 66
3568 * @opcpuid sse2
3569 * @opgroup og_sse2_pcksclr_datamove
3570 * @opxcpttype 1
3571 * @optest op1=1 op2=2 -> op1=2
3572 * @optest op1=0 op2=-42 -> op1=-42
3573 */
3574FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3575{
3576 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3578 if (IEM_IS_MODRM_REG_MODE(bRm))
3579 {
3580 /*
3581 * Register, register.
3582 */
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584 IEM_MC_BEGIN(0, 0);
3585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3587 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3588 IEM_GET_MODRM_REG(pVCpu, bRm));
3589 IEM_MC_ADVANCE_RIP_AND_FINISH();
3590 IEM_MC_END();
3591 }
3592 else
3593 {
3594 /*
3595 * Memory, register.
3596 */
3597 IEM_MC_BEGIN(0, 2);
3598 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3600
3601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3605
3606 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3607 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3608
3609 IEM_MC_ADVANCE_RIP_AND_FINISH();
3610 IEM_MC_END();
3611 }
3612}
3613
3614/* Opcode 0xf3 0x0f 0x29 - invalid */
3615/* Opcode 0xf2 0x0f 0x29 - invalid */
3616
3617
3618/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3619FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3620{
3621 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3623 if (IEM_IS_MODRM_REG_MODE(bRm))
3624 {
3625 /*
3626 * XMM, MMX
3627 */
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629
3630 IEM_MC_BEGIN(3, 1);
3631 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3632 IEM_MC_LOCAL(X86XMMREG, Dst);
3633 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3634 IEM_MC_ARG(uint64_t, u64Src, 2);
3635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3636 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3637 IEM_MC_PREPARE_FPU_USAGE();
3638 IEM_MC_FPU_TO_MMX_MODE();
3639
3640 IEM_MC_REF_MXCSR(pfMxcsr);
3641 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3642 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3643
3644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3645 IEM_MC_IF_MXCSR_XCPT_PENDING()
3646 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3647 IEM_MC_ELSE()
3648 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3649 IEM_MC_ENDIF();
3650
3651 IEM_MC_ADVANCE_RIP_AND_FINISH();
3652 IEM_MC_END();
3653 }
3654 else
3655 {
3656 /*
3657 * XMM, [mem64]
3658 */
3659 IEM_MC_BEGIN(3, 2);
3660 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3661 IEM_MC_LOCAL(X86XMMREG, Dst);
3662 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3663 IEM_MC_ARG(uint64_t, u64Src, 2);
3664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3665
3666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3669 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3670 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3671
3672 IEM_MC_PREPARE_FPU_USAGE();
3673 IEM_MC_FPU_TO_MMX_MODE();
3674 IEM_MC_REF_MXCSR(pfMxcsr);
3675
3676 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3677 IEM_MC_IF_MXCSR_XCPT_PENDING()
3678 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3679 IEM_MC_ELSE()
3680 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3681 IEM_MC_ENDIF();
3682
3683 IEM_MC_ADVANCE_RIP_AND_FINISH();
3684 IEM_MC_END();
3685 }
3686}
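/*
 * Note: CVTPI2PS converts two packed int32 values from mm/m64 into two
 * singles in the low qword of the destination; the high qword is preserved,
 * which is why the register path fetches the old XMM value first.  Roughly:
 *      dst.r32[0] = (float)src.i32[0];
 *      dst.r32[1] = (float)src.i32[1];     // dst.au64[1] unchanged
 * Both paths above switch the FPU to MMX mode like other MMX-operand ops.
 */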
3687
3688
3689/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3690FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3691{
3692 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3694 if (IEM_IS_MODRM_REG_MODE(bRm))
3695 {
3696 /*
3697 * XMM, MMX
3698 */
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700
3701 IEM_MC_BEGIN(3, 1);
3702 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3703 IEM_MC_LOCAL(X86XMMREG, Dst);
3704 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3705 IEM_MC_ARG(uint64_t, u64Src, 2);
3706 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3707 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3708 IEM_MC_PREPARE_FPU_USAGE();
3709 IEM_MC_FPU_TO_MMX_MODE();
3710
3711 IEM_MC_REF_MXCSR(pfMxcsr);
3712 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3713
3714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3715 IEM_MC_IF_MXCSR_XCPT_PENDING()
3716 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3717 IEM_MC_ELSE()
3718 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3719 IEM_MC_ENDIF();
3720
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /*
3727 * XMM, [mem64]
3728 */
3729 IEM_MC_BEGIN(3, 3);
3730 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3731 IEM_MC_LOCAL(X86XMMREG, Dst);
3732 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3733 IEM_MC_ARG(uint64_t, u64Src, 2);
3734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3735
3736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3738 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3740 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3741
3742 /* Doesn't cause a transition to MMX mode since the source is memory and no MMX register is touched. */
3743 IEM_MC_PREPARE_SSE_USAGE();
3744 IEM_MC_REF_MXCSR(pfMxcsr);
3745
3746 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3747 IEM_MC_IF_MXCSR_XCPT_PENDING()
3748 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3749 IEM_MC_ELSE()
3750 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3751 IEM_MC_ENDIF();
3752
3753 IEM_MC_ADVANCE_RIP_AND_FINISH();
3754 IEM_MC_END();
3755 }
3756}
3757
3758
3759/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3760FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3761{
3762 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3763
3764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3765 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3766 {
3767 if (IEM_IS_MODRM_REG_MODE(bRm))
3768 {
3769 /* XMM, greg64 */
3770 IEM_MC_BEGIN(3, 2);
3771 IEM_MC_LOCAL(uint32_t, fMxcsr);
3772 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3773 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3774 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3775 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3776
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3779 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3780
3781 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3782 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
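 /* Unlike the MMX variants above, the worker reports its MXCSR status in the
    local fMxcsr, which IEM_MC_SSE_UPDATE_MXCSR merges back into the guest
    MXCSR before the pending-exception check. */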
3783 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3784 IEM_MC_IF_MXCSR_XCPT_PENDING()
3785 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3786 IEM_MC_ELSE()
3787 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3788 IEM_MC_ENDIF();
3789
3790 IEM_MC_ADVANCE_RIP_AND_FINISH();
3791 IEM_MC_END();
3792 }
3793 else
3794 {
3795 /* XMM, [mem64] */
3796 IEM_MC_BEGIN(3, 4);
3797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3798 IEM_MC_LOCAL(uint32_t, fMxcsr);
3799 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3800 IEM_MC_LOCAL(int64_t, i64Src);
3801 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3802 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3803 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3804
3805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3808 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3809
3810 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3811 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3812 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3813 IEM_MC_IF_MXCSR_XCPT_PENDING()
3814 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3815 IEM_MC_ELSE()
3816 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3817 IEM_MC_ENDIF();
3818
3819 IEM_MC_ADVANCE_RIP_AND_FINISH();
3820 IEM_MC_END();
3821 }
3822 }
3823 else
3824 {
3825 if (IEM_IS_MODRM_REG_MODE(bRm))
3826 {
3827 /* XMM, greg32 */
3828 IEM_MC_BEGIN(3, 2);
3829 IEM_MC_LOCAL(uint32_t, fMxcsr);
3830 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3831 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3832 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3833 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3834
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3837 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3838
3839 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3840 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3841 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3842 IEM_MC_IF_MXCSR_XCPT_PENDING()
3843 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3844 IEM_MC_ELSE()
3845 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3846 IEM_MC_ENDIF();
3847
3848 IEM_MC_ADVANCE_RIP_AND_FINISH();
3849 IEM_MC_END();
3850 }
3851 else
3852 {
3853 /* XMM, [mem32] */
3854 IEM_MC_BEGIN(3, 4);
3855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3856 IEM_MC_LOCAL(uint32_t, fMxcsr);
3857 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3858 IEM_MC_LOCAL(int32_t, i32Src);
3859 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3860 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3861 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3862
3863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3865 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3866 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3867
3868 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3869 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3870 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3871 IEM_MC_IF_MXCSR_XCPT_PENDING()
3872 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3873 IEM_MC_ELSE()
3874 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3875 IEM_MC_ENDIF();
3876
3877 IEM_MC_ADVANCE_RIP_AND_FINISH();
3878 IEM_MC_END();
3879 }
3880 }
3881}
3882
3883
3884/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3885FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3886{
3887 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3888
3889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3890 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3891 {
3892 if (IEM_IS_MODRM_REG_MODE(bRm))
3893 {
3894 /* XMM, greg64 */
3895 IEM_MC_BEGIN(3, 2);
3896 IEM_MC_LOCAL(uint32_t, fMxcsr);
3897 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3898 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3899 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3900 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3901
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3904 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3905
3906 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3907 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3908 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3909 IEM_MC_IF_MXCSR_XCPT_PENDING()
3910 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3911 IEM_MC_ELSE()
3912 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3913 IEM_MC_ENDIF();
3914
3915 IEM_MC_ADVANCE_RIP_AND_FINISH();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 /* XMM, [mem64] */
3921 IEM_MC_BEGIN(3, 4);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923 IEM_MC_LOCAL(uint32_t, fMxcsr);
3924 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3925 IEM_MC_LOCAL(int64_t, i64Src);
3926 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3927 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3928 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3929
3930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3933 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3934
3935 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3936 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3937 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3938 IEM_MC_IF_MXCSR_XCPT_PENDING()
3939 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3940 IEM_MC_ELSE()
3941 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3942 IEM_MC_ENDIF();
3943
3944 IEM_MC_ADVANCE_RIP_AND_FINISH();
3945 IEM_MC_END();
3946 }
3947 }
3948 else
3949 {
3950 if (IEM_IS_MODRM_REG_MODE(bRm))
3951 {
3952 /* XMM, greg32 */
3953 IEM_MC_BEGIN(3, 2);
3954 IEM_MC_LOCAL(uint32_t, fMxcsr);
3955 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3956 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3957 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3958 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3959
3960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3961 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3962 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3963
3964 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3965 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3966 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3967 IEM_MC_IF_MXCSR_XCPT_PENDING()
3968 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3969 IEM_MC_ELSE()
3970 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3971 IEM_MC_ENDIF();
3972
3973 IEM_MC_ADVANCE_RIP_AND_FINISH();
3974 IEM_MC_END();
3975 }
3976 else
3977 {
3978 /* XMM, [mem32] */
3979 IEM_MC_BEGIN(3, 4);
3980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3981 IEM_MC_LOCAL(uint32_t, fMxcsr);
3982 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3983 IEM_MC_LOCAL(int32_t, i32Src);
3984 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3985 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3986 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3987
3988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3990 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3991 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3992
3993 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3994 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3995 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3996 IEM_MC_IF_MXCSR_XCPT_PENDING()
3997 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3998 IEM_MC_ELSE()
3999 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4000 IEM_MC_ENDIF();
4001
4002 IEM_MC_ADVANCE_RIP_AND_FINISH();
4003 IEM_MC_END();
4004 }
4005 }
4006}
4007
4008
4009/**
4010 * @opcode 0x2b
4011 * @opcodesub !11 mr/reg
4012 * @oppfx none
4013 * @opcpuid sse
4014 * @opgroup og_sse1_cachect
4015 * @opxcpttype 1
4016 * @optest op1=1 op2=2 -> op1=2
4017 * @optest op1=0 op2=-42 -> op1=-42
4018 */
4019FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4020{
4021 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4023 if (IEM_IS_MODRM_MEM_MODE(bRm))
4024 {
4025 /*
4026 * memory, register.
4027 */
4028 IEM_MC_BEGIN(0, 2);
4029 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4031
4032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4035 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4036
4037 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4038 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
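 /* The non-temporal hint is not modelled here, so this ends up as an ordinary
    128-bit store; the _ALIGN_SSE variant still enforces the instruction's
    16-byte alignment requirement. */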
4039
4040 IEM_MC_ADVANCE_RIP_AND_FINISH();
4041 IEM_MC_END();
4042 }
4043 /* The register, register encoding is invalid. */
4044 else
4045 return IEMOP_RAISE_INVALID_OPCODE();
4046}
4047
4048/**
4049 * @opcode 0x2b
4050 * @opcodesub !11 mr/reg
4051 * @oppfx 0x66
4052 * @opcpuid sse2
4053 * @opgroup og_sse2_cachect
4054 * @opxcpttype 1
4055 * @optest op1=1 op2=2 -> op1=2
4056 * @optest op1=0 op2=-42 -> op1=-42
4057 */
4058FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4059{
4060 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4062 if (IEM_IS_MODRM_MEM_MODE(bRm))
4063 {
4064 /*
4065 * memory, register.
4066 */
4067 IEM_MC_BEGIN(0, 2);
4068 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4070
4071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4074 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4075
4076 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4077 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4078
4079 IEM_MC_ADVANCE_RIP_AND_FINISH();
4080 IEM_MC_END();
4081 }
4082 /* The register, register encoding is invalid. */
4083 else
4084 return IEMOP_RAISE_INVALID_OPCODE();
4085}
4086/* Opcode 0xf3 0x0f 0x2b - invalid */
4087/* Opcode 0xf2 0x0f 0x2b - invalid */
4088
4089
4090/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4091FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4092{
4093 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
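 /* The 'tt' forms convert with truncation (round towards zero) regardless of
    MXCSR.RC; the cvtps2pi form at 0x0f 0x2d honours the current rounding mode. */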
4094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4095 if (IEM_IS_MODRM_REG_MODE(bRm))
4096 {
4097 /*
4098 * Register, register.
4099 */
4100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4101
4102 IEM_MC_BEGIN(3, 1);
4103 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4104 IEM_MC_LOCAL(uint64_t, u64Dst);
4105 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4106 IEM_MC_ARG(uint64_t, u64Src, 2);
4107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4108 IEM_MC_PREPARE_FPU_USAGE();
4109 IEM_MC_FPU_TO_MMX_MODE();
4110
4111 IEM_MC_REF_MXCSR(pfMxcsr);
4112 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
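 /* Only the low quadword of the source is needed: it holds the two packed
    single precision values that get converted to two signed dwords. */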
4113
4114 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4115 IEM_MC_IF_MXCSR_XCPT_PENDING()
4116 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4117 IEM_MC_ELSE()
4118 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4119 IEM_MC_ENDIF();
4120
4121 IEM_MC_ADVANCE_RIP_AND_FINISH();
4122 IEM_MC_END();
4123 }
4124 else
4125 {
4126 /*
4127 * Register, memory.
4128 */
4129 IEM_MC_BEGIN(3, 2);
4130 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4131 IEM_MC_LOCAL(uint64_t, u64Dst);
4132 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4133 IEM_MC_ARG(uint64_t, u64Src, 2);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_FPU_USAGE();
4142 IEM_MC_FPU_TO_MMX_MODE();
4143 IEM_MC_REF_MXCSR(pfMxcsr);
4144
4145 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4146 IEM_MC_IF_MXCSR_XCPT_PENDING()
4147 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4148 IEM_MC_ELSE()
4149 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4150 IEM_MC_ENDIF();
4151
4152 IEM_MC_ADVANCE_RIP_AND_FINISH();
4153 IEM_MC_END();
4154 }
4155}
4156
4157
4158/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4159FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4160{
4161 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4163 if (IEM_IS_MODRM_REG_MODE(bRm))
4164 {
4165 /*
4166 * Register, register.
4167 */
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4169
4170 IEM_MC_BEGIN(3, 1);
4171 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4172 IEM_MC_LOCAL(uint64_t, u64Dst);
4173 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4174 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4175 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4176 IEM_MC_PREPARE_FPU_USAGE();
4177 IEM_MC_FPU_TO_MMX_MODE();
4178
4179 IEM_MC_REF_MXCSR(pfMxcsr);
4180 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4181
4182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4183 IEM_MC_IF_MXCSR_XCPT_PENDING()
4184 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4185 IEM_MC_ELSE()
4186 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4187 IEM_MC_ENDIF();
4188
4189 IEM_MC_ADVANCE_RIP_AND_FINISH();
4190 IEM_MC_END();
4191 }
4192 else
4193 {
4194 /*
4195 * Register, memory.
4196 */
4197 IEM_MC_BEGIN(3, 3);
4198 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4199 IEM_MC_LOCAL(uint64_t, u64Dst);
4200 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4201 IEM_MC_LOCAL(X86XMMREG, uSrc);
4202 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4204
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4208 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
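 /* The source here is a full 128-bit operand (two doubles), so the usual
    16-byte alignment check applies; a misaligned access raises #GP(0). */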
4209
4210 IEM_MC_PREPARE_FPU_USAGE();
4211 IEM_MC_FPU_TO_MMX_MODE();
4212
4213 IEM_MC_REF_MXCSR(pfMxcsr);
4214
4215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4216 IEM_MC_IF_MXCSR_XCPT_PENDING()
4217 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4218 IEM_MC_ELSE()
4219 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4220 IEM_MC_ENDIF();
4221
4222 IEM_MC_ADVANCE_RIP_AND_FINISH();
4223 IEM_MC_END();
4224 }
4225}
4226
4227
4228/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4229FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4230{
4231 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4232
4233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4235 {
4236 if (IEM_IS_MODRM_REG_MODE(bRm))
4237 {
4238 /* greg64, XMM */
4239 IEM_MC_BEGIN(3, 2);
4240 IEM_MC_LOCAL(uint32_t, fMxcsr);
4241 IEM_MC_LOCAL(int64_t, i64Dst);
4242 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4243 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4244 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4245
4246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4248 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4249
4250 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4251 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4252 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4253 IEM_MC_IF_MXCSR_XCPT_PENDING()
4254 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4255 IEM_MC_ELSE()
4256 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4257 IEM_MC_ENDIF();
4258
4259 IEM_MC_ADVANCE_RIP_AND_FINISH();
4260 IEM_MC_END();
4261 }
4262 else
4263 {
4264 /* greg64, [mem32] */
4265 IEM_MC_BEGIN(3, 4);
4266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4267 IEM_MC_LOCAL(uint32_t, fMxcsr);
4268 IEM_MC_LOCAL(int64_t, i64Dst);
4269 IEM_MC_LOCAL(uint32_t, u32Src);
4270 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4271 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4272 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4273
4274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4276 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4277 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4278
4279 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4280 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4281 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4282 IEM_MC_IF_MXCSR_XCPT_PENDING()
4283 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4284 IEM_MC_ELSE()
4285 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4286 IEM_MC_ENDIF();
4287
4288 IEM_MC_ADVANCE_RIP_AND_FINISH();
4289 IEM_MC_END();
4290 }
4291 }
4292 else
4293 {
4294 if (IEM_IS_MODRM_REG_MODE(bRm))
4295 {
4296 /* greg32, XMM */
4297 IEM_MC_BEGIN(3, 2);
4298 IEM_MC_LOCAL(uint32_t, fMxcsr);
4299 IEM_MC_LOCAL(int32_t, i32Dst);
4300 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4301 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4302 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4303
4304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4305 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4306 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4307
4308 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4309 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4310 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4311 IEM_MC_IF_MXCSR_XCPT_PENDING()
4312 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4313 IEM_MC_ELSE()
4314 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4315 IEM_MC_ENDIF();
4316
4317 IEM_MC_ADVANCE_RIP_AND_FINISH();
4318 IEM_MC_END();
4319 }
4320 else
4321 {
4322 /* greg32, [mem32] */
4323 IEM_MC_BEGIN(3, 4);
4324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4325 IEM_MC_LOCAL(uint32_t, fMxcsr);
4326 IEM_MC_LOCAL(int32_t, i32Dst);
4327 IEM_MC_LOCAL(uint32_t, u32Src);
4328 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4329 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4330 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4331
4332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4335 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4336
4337 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4338 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4339 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4340 IEM_MC_IF_MXCSR_XCPT_PENDING()
4341 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4342 IEM_MC_ELSE()
4343 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4344 IEM_MC_ENDIF();
4345
4346 IEM_MC_ADVANCE_RIP_AND_FINISH();
4347 IEM_MC_END();
4348 }
4349 }
4350}
4351
4352
4353/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4354FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4355{
4356 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4357
4358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4360 {
4361 if (IEM_IS_MODRM_REG_MODE(bRm))
4362 {
4363 /* greg64, XMM */
4364 IEM_MC_BEGIN(3, 2);
4365 IEM_MC_LOCAL(uint32_t, fMxcsr);
4366 IEM_MC_LOCAL(int64_t, i64Dst);
4367 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4368 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4369 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4370
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4373 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4374
4375 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4376 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4377 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4378 IEM_MC_IF_MXCSR_XCPT_PENDING()
4379 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4380 IEM_MC_ELSE()
4381 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4382 IEM_MC_ENDIF();
4383
4384 IEM_MC_ADVANCE_RIP_AND_FINISH();
4385 IEM_MC_END();
4386 }
4387 else
4388 {
4389 /* greg64, [mem64] */
4390 IEM_MC_BEGIN(3, 4);
4391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4392 IEM_MC_LOCAL(uint32_t, fMxcsr);
4393 IEM_MC_LOCAL(int64_t, i64Dst);
4394 IEM_MC_LOCAL(uint64_t, u64Src);
4395 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4396 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4397 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4398
4399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4401 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4402 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4403
4404 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4405 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4406 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4407 IEM_MC_IF_MXCSR_XCPT_PENDING()
4408 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4409 IEM_MC_ELSE()
4410 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4411 IEM_MC_ENDIF();
4412
4413 IEM_MC_ADVANCE_RIP_AND_FINISH();
4414 IEM_MC_END();
4415 }
4416 }
4417 else
4418 {
4419 if (IEM_IS_MODRM_REG_MODE(bRm))
4420 {
4421 /* greg32, XMM */
4422 IEM_MC_BEGIN(3, 2);
4423 IEM_MC_LOCAL(uint32_t, fMxcsr);
4424 IEM_MC_LOCAL(int32_t, i32Dst);
4425 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4426 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4427 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4428
4429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4431 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4432
4433 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4434 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4435 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4436 IEM_MC_IF_MXCSR_XCPT_PENDING()
4437 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4438 IEM_MC_ELSE()
4439 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4440 IEM_MC_ENDIF();
4441
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 }
4445 else
4446 {
4447 /* greg32, [mem64] */
4448 IEM_MC_BEGIN(3, 4);
4449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4450 IEM_MC_LOCAL(uint32_t, fMxcsr);
4451 IEM_MC_LOCAL(int32_t, i32Dst);
4452 IEM_MC_LOCAL(uint64_t, u64Src);
4453 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4454 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4455 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4456
4457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4460 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4461
4462 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4463 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4464 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4465 IEM_MC_IF_MXCSR_XCPT_PENDING()
4466 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4467 IEM_MC_ELSE()
4468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4469 IEM_MC_ENDIF();
4470
4471 IEM_MC_ADVANCE_RIP_AND_FINISH();
4472 IEM_MC_END();
4473 }
4474 }
4475}
4476
4477
4478/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4479FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4480{
4481 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4483 if (IEM_IS_MODRM_REG_MODE(bRm))
4484 {
4485 /*
4486 * Register, register.
4487 */
4488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4489
4490 IEM_MC_BEGIN(3, 1);
4491 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4492 IEM_MC_LOCAL(uint64_t, u64Dst);
4493 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4494 IEM_MC_ARG(uint64_t, u64Src, 2);
4495 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4496 IEM_MC_PREPARE_FPU_USAGE();
4497 IEM_MC_FPU_TO_MMX_MODE();
4498
4499 IEM_MC_REF_MXCSR(pfMxcsr);
4500 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4501
4502 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4503 IEM_MC_IF_MXCSR_XCPT_PENDING()
4504 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4505 IEM_MC_ELSE()
4506 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4507 IEM_MC_ENDIF();
4508
4509 IEM_MC_ADVANCE_RIP_AND_FINISH();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 /*
4515 * Register, memory.
4516 */
4517 IEM_MC_BEGIN(3, 2);
4518 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4519 IEM_MC_LOCAL(uint64_t, u64Dst);
4520 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4521 IEM_MC_ARG(uint64_t, u64Src, 2);
4522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4523
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4527 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4528
4529 IEM_MC_PREPARE_FPU_USAGE();
4530 IEM_MC_FPU_TO_MMX_MODE();
4531 IEM_MC_REF_MXCSR(pfMxcsr);
4532
4533 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4534 IEM_MC_IF_MXCSR_XCPT_PENDING()
4535 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4536 IEM_MC_ELSE()
4537 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4538 IEM_MC_ENDIF();
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543}
4544
4545
4546/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4547FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4548{
4549 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4551 if (IEM_IS_MODRM_REG_MODE(bRm))
4552 {
4553 /*
4554 * Register, register.
4555 */
4556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4557
4558 IEM_MC_BEGIN(3, 1);
4559 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4560 IEM_MC_LOCAL(uint64_t, u64Dst);
4561 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4562 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4563 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4564 IEM_MC_PREPARE_FPU_USAGE();
4565 IEM_MC_FPU_TO_MMX_MODE();
4566
4567 IEM_MC_REF_MXCSR(pfMxcsr);
4568 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4569
4570 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4571 IEM_MC_IF_MXCSR_XCPT_PENDING()
4572 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4573 IEM_MC_ELSE()
4574 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4575 IEM_MC_ENDIF();
4576
4577 IEM_MC_ADVANCE_RIP_AND_FINISH();
4578 IEM_MC_END();
4579 }
4580 else
4581 {
4582 /*
4583 * Register, memory.
4584 */
4585 IEM_MC_BEGIN(3, 3);
4586 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4587 IEM_MC_LOCAL(uint64_t, u64Dst);
4588 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4589 IEM_MC_LOCAL(X86XMMREG, uSrc);
4590 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4592
4593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4596 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4597
4598 IEM_MC_PREPARE_FPU_USAGE();
4599 IEM_MC_FPU_TO_MMX_MODE();
4600
4601 IEM_MC_REF_MXCSR(pfMxcsr);
4602
4603 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4604 IEM_MC_IF_MXCSR_XCPT_PENDING()
4605 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4606 IEM_MC_ELSE()
4607 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4608 IEM_MC_ENDIF();
4609
4610 IEM_MC_ADVANCE_RIP_AND_FINISH();
4611 IEM_MC_END();
4612 }
4613}
4614
4615
4616/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4617FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4618{
4619 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4620
4621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4622 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4623 {
4624 if (IEM_IS_MODRM_REG_MODE(bRm))
4625 {
4626 /* greg64, XMM */
4627 IEM_MC_BEGIN(3, 2);
4628 IEM_MC_LOCAL(uint32_t, fMxcsr);
4629 IEM_MC_LOCAL(int64_t, i64Dst);
4630 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4631 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4632 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4633
4634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4636 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4637
4638 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4639 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4640 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4641 IEM_MC_IF_MXCSR_XCPT_PENDING()
4642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4643 IEM_MC_ELSE()
4644 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4645 IEM_MC_ENDIF();
4646
4647 IEM_MC_ADVANCE_RIP_AND_FINISH();
4648 IEM_MC_END();
4649 }
4650 else
4651 {
4652 /* greg64, [mem32] */
4653 IEM_MC_BEGIN(3, 4);
4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4655 IEM_MC_LOCAL(uint32_t, fMxcsr);
4656 IEM_MC_LOCAL(int64_t, i64Dst);
4657 IEM_MC_LOCAL(uint32_t, u32Src);
4658 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4659 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4660 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4661
4662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4665 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4666
4667 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4668 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4669 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4670 IEM_MC_IF_MXCSR_XCPT_PENDING()
4671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4672 IEM_MC_ELSE()
4673 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4674 IEM_MC_ENDIF();
4675
4676 IEM_MC_ADVANCE_RIP_AND_FINISH();
4677 IEM_MC_END();
4678 }
4679 }
4680 else
4681 {
4682 if (IEM_IS_MODRM_REG_MODE(bRm))
4683 {
4684 /* greg32, XMM */
4685 IEM_MC_BEGIN(3, 2);
4686 IEM_MC_LOCAL(uint32_t, fMxcsr);
4687 IEM_MC_LOCAL(int32_t, i32Dst);
4688 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4689 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4690 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4691
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4694 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4695
4696 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4697 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4698 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4699 IEM_MC_IF_MXCSR_XCPT_PENDING()
4700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4701 IEM_MC_ELSE()
4702 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4703 IEM_MC_ENDIF();
4704
4705 IEM_MC_ADVANCE_RIP_AND_FINISH();
4706 IEM_MC_END();
4707 }
4708 else
4709 {
4710 /* greg32, [mem32] */
4711 IEM_MC_BEGIN(3, 4);
4712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4713 IEM_MC_LOCAL(uint32_t, fMxcsr);
4714 IEM_MC_LOCAL(int32_t, i32Dst);
4715 IEM_MC_LOCAL(uint32_t, u32Src);
4716 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4717 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4718 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4719
4720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4722 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4723 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4724
4725 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4726 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4727 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4728 IEM_MC_IF_MXCSR_XCPT_PENDING()
4729 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4730 IEM_MC_ELSE()
4731 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4732 IEM_MC_ENDIF();
4733
4734 IEM_MC_ADVANCE_RIP_AND_FINISH();
4735 IEM_MC_END();
4736 }
4737 }
4738}
4739
4740
4741/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4742FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4743{
4744 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4745
4746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4748 {
4749 if (IEM_IS_MODRM_REG_MODE(bRm))
4750 {
4751 /* greg64, XMM */
4752 IEM_MC_BEGIN(3, 2);
4753 IEM_MC_LOCAL(uint32_t, fMxcsr);
4754 IEM_MC_LOCAL(int64_t, i64Dst);
4755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4756 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4757 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4758
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4762
4763 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4765 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4766 IEM_MC_IF_MXCSR_XCPT_PENDING()
4767 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4768 IEM_MC_ELSE()
4769 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4770 IEM_MC_ENDIF();
4771
4772 IEM_MC_ADVANCE_RIP_AND_FINISH();
4773 IEM_MC_END();
4774 }
4775 else
4776 {
4777 /* greg64, [mem64] */
4778 IEM_MC_BEGIN(3, 4);
4779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4780 IEM_MC_LOCAL(uint32_t, fMxcsr);
4781 IEM_MC_LOCAL(int64_t, i64Dst);
4782 IEM_MC_LOCAL(uint64_t, u64Src);
4783 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4784 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4785 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4786
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4791
4792 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4795 IEM_MC_IF_MXCSR_XCPT_PENDING()
4796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4797 IEM_MC_ELSE()
4798 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4799 IEM_MC_ENDIF();
4800
4801 IEM_MC_ADVANCE_RIP_AND_FINISH();
4802 IEM_MC_END();
4803 }
4804 }
4805 else
4806 {
4807 if (IEM_IS_MODRM_REG_MODE(bRm))
4808 {
4809 /* greg32, XMM */
4810 IEM_MC_BEGIN(3, 2);
4811 IEM_MC_LOCAL(uint32_t, fMxcsr);
4812 IEM_MC_LOCAL(int32_t, i32Dst);
4813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4814 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4815 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4816
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4820
4821 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4824 IEM_MC_IF_MXCSR_XCPT_PENDING()
4825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4826 IEM_MC_ELSE()
4827 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4828 IEM_MC_ENDIF();
4829
4830 IEM_MC_ADVANCE_RIP_AND_FINISH();
4831 IEM_MC_END();
4832 }
4833 else
4834 {
4835 /* greg32, [mem64] */
4836 IEM_MC_BEGIN(3, 4);
4837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4838 IEM_MC_LOCAL(uint32_t, fMxcsr);
4839 IEM_MC_LOCAL(int32_t, i32Dst);
4840 IEM_MC_LOCAL(uint64_t, u64Src);
4841 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4842 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4843 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4844
4845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4847 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4849
4850 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4853 IEM_MC_IF_MXCSR_XCPT_PENDING()
4854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4855 IEM_MC_ELSE()
4856 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4857 IEM_MC_ENDIF();
4858
4859 IEM_MC_ADVANCE_RIP_AND_FINISH();
4860 IEM_MC_END();
4861 }
4862 }
4863}
4864
4865
4866/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4867FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4868{
4869 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
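 /* Unordered compare: unlike comiss, this only signals #IA on an SNaN
    operand; a QNaN merely yields the unordered result (ZF=PF=CF=1). */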
4870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4871 if (IEM_IS_MODRM_REG_MODE(bRm))
4872 {
4873 /*
4874 * Register, register.
4875 */
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877 IEM_MC_BEGIN(4, 1);
4878 IEM_MC_LOCAL(uint32_t, fEFlags);
4879 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4880 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4881 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4882 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4883 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4884 IEM_MC_PREPARE_SSE_USAGE();
4885 IEM_MC_FETCH_EFLAGS(fEFlags);
4886 IEM_MC_REF_MXCSR(pfMxcsr);
4887 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4888 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4889 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4890 IEM_MC_IF_MXCSR_XCPT_PENDING()
4891 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4892 IEM_MC_ELSE()
4893 IEM_MC_COMMIT_EFLAGS(fEFlags);
4894 IEM_MC_ENDIF();
4895
4896 IEM_MC_ADVANCE_RIP_AND_FINISH();
4897 IEM_MC_END();
4898 }
4899 else
4900 {
4901 /*
4902 * Register, memory.
4903 */
4904 IEM_MC_BEGIN(4, 3);
4905 IEM_MC_LOCAL(uint32_t, fEFlags);
4906 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4907 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4908 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4909 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4910 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4912
4913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4915 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4916 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4917
4918 IEM_MC_PREPARE_SSE_USAGE();
4919 IEM_MC_FETCH_EFLAGS(fEFlags);
4920 IEM_MC_REF_MXCSR(pfMxcsr);
4921 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4922 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4923 IEM_MC_IF_MXCSR_XCPT_PENDING()
4924 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4925 IEM_MC_ELSE()
4926 IEM_MC_COMMIT_EFLAGS(fEFlags);
4927 IEM_MC_ENDIF();
4928
4929 IEM_MC_ADVANCE_RIP_AND_FINISH();
4930 IEM_MC_END();
4931 }
4932}
4933
4934
4935/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4936FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4937{
4938 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4940 if (IEM_IS_MODRM_REG_MODE(bRm))
4941 {
4942 /*
4943 * Register, register.
4944 */
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_BEGIN(4, 1);
4947 IEM_MC_LOCAL(uint32_t, fEFlags);
4948 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4949 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4950 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4951 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4952 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4953 IEM_MC_PREPARE_SSE_USAGE();
4954 IEM_MC_FETCH_EFLAGS(fEFlags);
4955 IEM_MC_REF_MXCSR(pfMxcsr);
4956 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4957 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4958 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4959 IEM_MC_IF_MXCSR_XCPT_PENDING()
4960 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4961 IEM_MC_ELSE()
4962 IEM_MC_COMMIT_EFLAGS(fEFlags);
4963 IEM_MC_ENDIF();
4964
4965 IEM_MC_ADVANCE_RIP_AND_FINISH();
4966 IEM_MC_END();
4967 }
4968 else
4969 {
4970 /*
4971 * Register, memory.
4972 */
4973 IEM_MC_BEGIN(4, 3);
4974 IEM_MC_LOCAL(uint32_t, fEFlags);
4975 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4976 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4977 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4978 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4979 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4981
4982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4985 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4986
4987 IEM_MC_PREPARE_SSE_USAGE();
4988 IEM_MC_FETCH_EFLAGS(fEFlags);
4989 IEM_MC_REF_MXCSR(pfMxcsr);
4990 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4991 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4992 IEM_MC_IF_MXCSR_XCPT_PENDING()
4993 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4994 IEM_MC_ELSE()
4995 IEM_MC_COMMIT_EFLAGS(fEFlags);
4996 IEM_MC_ENDIF();
4997
4998 IEM_MC_ADVANCE_RIP_AND_FINISH();
4999 IEM_MC_END();
5000 }
5001}
5002
5003
5004/* Opcode 0xf3 0x0f 0x2e - invalid */
5005/* Opcode 0xf2 0x0f 0x2e - invalid */
5006
5007
5008/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5009FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5010{
5011 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5012 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5013 if (IEM_IS_MODRM_REG_MODE(bRm))
5014 {
5015 /*
5016 * Register, register.
5017 */
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_BEGIN(4, 1);
5020 IEM_MC_LOCAL(uint32_t, fEFlags);
5021 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5022 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5023 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5024 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5026 IEM_MC_PREPARE_SSE_USAGE();
5027 IEM_MC_FETCH_EFLAGS(fEFlags);
5028 IEM_MC_REF_MXCSR(pfMxcsr);
5029 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5030 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5032 IEM_MC_IF_MXCSR_XCPT_PENDING()
5033 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5034 IEM_MC_ELSE()
5035 IEM_MC_COMMIT_EFLAGS(fEFlags);
5036 IEM_MC_ENDIF();
5037
5038 IEM_MC_ADVANCE_RIP_AND_FINISH();
5039 IEM_MC_END();
5040 }
5041 else
5042 {
5043 /*
5044 * Register, memory.
5045 */
5046 IEM_MC_BEGIN(4, 3);
5047 IEM_MC_LOCAL(uint32_t, fEFlags);
5048 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5049 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5050 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5051 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5052 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5054
5055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5058 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5059
5060 IEM_MC_PREPARE_SSE_USAGE();
5061 IEM_MC_FETCH_EFLAGS(fEFlags);
5062 IEM_MC_REF_MXCSR(pfMxcsr);
5063 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5064 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5065 IEM_MC_IF_MXCSR_XCPT_PENDING()
5066 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5067 IEM_MC_ELSE()
5068 IEM_MC_COMMIT_EFLAGS(fEFlags);
5069 IEM_MC_ENDIF();
5070
5071 IEM_MC_ADVANCE_RIP_AND_FINISH();
5072 IEM_MC_END();
5073 }
5074}
5075
5076
5077/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5078FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5079{
5080 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5082 if (IEM_IS_MODRM_REG_MODE(bRm))
5083 {
5084 /*
5085 * Register, register.
5086 */
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_BEGIN(4, 1);
5089 IEM_MC_LOCAL(uint32_t, fEFlags);
5090 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5091 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5092 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5093 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5095 IEM_MC_PREPARE_SSE_USAGE();
5096 IEM_MC_FETCH_EFLAGS(fEFlags);
5097 IEM_MC_REF_MXCSR(pfMxcsr);
5098 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5099 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5101 IEM_MC_IF_MXCSR_XCPT_PENDING()
5102 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5103 IEM_MC_ELSE()
5104 IEM_MC_COMMIT_EFLAGS(fEFlags);
5105 IEM_MC_ENDIF();
5106
5107 IEM_MC_ADVANCE_RIP_AND_FINISH();
5108 IEM_MC_END();
5109 }
5110 else
5111 {
5112 /*
5113 * Register, memory.
5114 */
5115 IEM_MC_BEGIN(4, 3);
5116 IEM_MC_LOCAL(uint32_t, fEFlags);
5117 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5118 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5119 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5120 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5121 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5123
5124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5126 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5127 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5128
5129 IEM_MC_PREPARE_SSE_USAGE();
5130 IEM_MC_FETCH_EFLAGS(fEFlags);
5131 IEM_MC_REF_MXCSR(pfMxcsr);
5132 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5134 IEM_MC_IF_MXCSR_XCPT_PENDING()
5135 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5136 IEM_MC_ELSE()
5137 IEM_MC_COMMIT_EFLAGS(fEFlags);
5138 IEM_MC_ENDIF();
5139
5140 IEM_MC_ADVANCE_RIP_AND_FINISH();
5141 IEM_MC_END();
5142 }
5143}
5144
5145
5146/* Opcode 0xf3 0x0f 0x2f - invalid */
5147/* Opcode 0xf2 0x0f 0x2f - invalid */
5148
5149/** Opcode 0x0f 0x30. */
5150FNIEMOP_DEF(iemOp_wrmsr)
5151{
5152 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
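 /* Too much work for microcode (CPL checks, MSR range lookups, possible
    VM exits), so defer to the C implementation iemCImpl_wrmsr below. */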
5154 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5155}
5156
5157
5158/** Opcode 0x0f 0x31. */
5159FNIEMOP_DEF(iemOp_rdtsc)
5160{
5161 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5164}
5165
5166
5167/** Opcode 0x0f 0x32. */
5168FNIEMOP_DEF(iemOp_rdmsr)
5169{
5170 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5173}
5174
5175
5176/** Opcode 0x0f 0x33. */
5177FNIEMOP_DEF(iemOp_rdpmc)
5178{
5179 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5182}
5183
5184
5185/** Opcode 0x0f 0x34. */
5186FNIEMOP_DEF(iemOp_sysenter)
5187{
5188 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5191}
5192
5193/** Opcode 0x0f 0x35. */
5194FNIEMOP_DEF(iemOp_sysexit)
5195{
5196 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5198 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5199}
5200
5201/** Opcode 0x0f 0x37. */
5202FNIEMOP_STUB(iemOp_getsec);
5203
5204
5205/** Opcode 0x0f 0x38. */
5206FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5207{
5208#ifdef IEM_WITH_THREE_0F_38
5209 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
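 /* The three-byte tables hold four entries per opcode byte, one for each
    mandatory-prefix variant (none, 0x66, 0xf3, 0xf2) selected via idxPrefix. */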
5210 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5211#else
5212 IEMOP_BITCH_ABOUT_STUB();
5213 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5214#endif
5215}
5216
5217
5218/** Opcode 0x0f 0x3a. */
5219FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5220{
5221#ifdef IEM_WITH_THREE_0F_3A
5222 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5223 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5224#else
5225 IEMOP_BITCH_ABOUT_STUB();
5226 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5227#endif
5228}
5229
5230
5231/**
5232 * Implements a conditional move.
5233 *
5234 * Wish there was an obvious way to do this that would let us share code and
5235 * reduce bloat.
5236 *
 * Note that in 64-bit mode a 32-bit CMOV zero-extends its destination
 * register even when the condition is false, which is why the 32-bit cases
 * clear the high half of the destination in their else branches.
 *
5237 * @param a_Cnd The conditional "microcode" operation.
5238 */
5239#define CMOV_X(a_Cnd) \
5240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5241 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5242 { \
5243 switch (pVCpu->iem.s.enmEffOpSize) \
5244 { \
5245 case IEMMODE_16BIT: \
5246 IEM_MC_BEGIN(0, 1); \
5247 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5248 a_Cnd { \
5249 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5250 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5251 } IEM_MC_ENDIF(); \
5252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5253 IEM_MC_END(); \
5254 break; \
5255 \
5256 case IEMMODE_32BIT: \
5257 IEM_MC_BEGIN(0, 1); \
5258 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5259 a_Cnd { \
5260 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5261 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5262 } IEM_MC_ELSE() { \
5263 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5264 } IEM_MC_ENDIF(); \
5265 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5266 IEM_MC_END(); \
5267 break; \
5268 \
5269 case IEMMODE_64BIT: \
5270 IEM_MC_BEGIN(0, 1); \
5271 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5272 a_Cnd { \
5273 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5274 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5275 } IEM_MC_ENDIF(); \
5276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5277 IEM_MC_END(); \
5278 break; \
5279 \
5280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5281 } \
5282 } \
5283 else \
5284 { \
5285 switch (pVCpu->iem.s.enmEffOpSize) \
5286 { \
5287 case IEMMODE_16BIT: \
5288 IEM_MC_BEGIN(0, 2); \
5289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5290 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5292 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5293 a_Cnd { \
5294 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5295 } IEM_MC_ENDIF(); \
5296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5297 IEM_MC_END(); \
5298 break; \
5299 \
5300 case IEMMODE_32BIT: \
5301 IEM_MC_BEGIN(0, 2); \
5302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5303 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5305 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5306 a_Cnd { \
5307 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5308 } IEM_MC_ELSE() { \
5309 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5310 } IEM_MC_ENDIF(); \
5311 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5312 IEM_MC_END(); \
5313 break; \
5314 \
5315 case IEMMODE_64BIT: \
5316 IEM_MC_BEGIN(0, 2); \
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5318 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5320 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5321 a_Cnd { \
5322 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5323 } IEM_MC_ENDIF(); \
5324 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5325 IEM_MC_END(); \
5326 break; \
5327 \
5328 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5329 } \
5330 } do {} while (0)
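/* Note: the trailing 'do {} while (0)' in CMOV_X merely consumes the
   semicolon at the invocation site so each use parses as one statement. */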
5331
5332
5333
5334/** Opcode 0x0f 0x40. */
5335FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5336{
5337 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5338 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5339}
5340
5341
5342/** Opcode 0x0f 0x41. */
5343FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5344{
5345 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5346 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5347}
5348
5349
5350/** Opcode 0x0f 0x42. */
5351FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5352{
5353 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5354 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5355}
5356
5357
5358/** Opcode 0x0f 0x43. */
5359FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5360{
5361 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5362 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5363}
5364
5365
5366/** Opcode 0x0f 0x44. */
5367FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5368{
5369 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5370 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5371}
5372
5373
5374/** Opcode 0x0f 0x45. */
5375FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5376{
5377 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5378 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5379}
5380
5381
5382/** Opcode 0x0f 0x46. */
5383FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5384{
5385 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5386 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5387}
5388
5389
5390/** Opcode 0x0f 0x47. */
5391FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5392{
5393 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5394 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5395}
5396
5397
5398/** Opcode 0x0f 0x48. */
5399FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5400{
5401 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5402 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5403}
5404
5405
5406/** Opcode 0x0f 0x49. */
5407FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5408{
5409 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5410 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5411}
5412
5413
5414/** Opcode 0x0f 0x4a. */
5415FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5416{
5417 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5418 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5419}
5420
5421
5422/** Opcode 0x0f 0x4b. */
5423FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5424{
5425 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5426 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5427}
5428
5429
5430/** Opcode 0x0f 0x4c. */
5431FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5432{
5433 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5434 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5435}
5436
5437
5438/** Opcode 0x0f 0x4d. */
5439FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5440{
5441 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5442 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5443}
5444
5445
5446/** Opcode 0x0f 0x4e. */
5447FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5448{
5449 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5450 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5451}
5452
5453
5454/** Opcode 0x0f 0x4f. */
5455FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5456{
5457 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5458 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5459}
5460
5461#undef CMOV_X
5462
5463/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5464FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5465{
5466 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if (IEM_IS_MODRM_REG_MODE(bRm))
5469 {
5470 /*
5471 * Register, register.
5472 */
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474 IEM_MC_BEGIN(2, 1);
5475 IEM_MC_LOCAL(uint8_t, u8Dst);
5476 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5477 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5478 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5479 IEM_MC_PREPARE_SSE_USAGE();
5480 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5482 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5483 IEM_MC_ADVANCE_RIP_AND_FINISH();
5484 IEM_MC_END();
5485 }
5486 /* No memory operand. */
5487 else
5488 return IEMOP_RAISE_INVALID_OPCODE();
5489}
5490
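/*
 * For reference, movmskps gathers the sign bit of each of the four packed
 * singles into the low nibble of the destination.  A sketch of what the
 * iemAImpl_movmskps_u128 worker is expected to compute (reference
 * implementation name hypothetical, using the au32[] view of RTUINT128U):
 *
 *     static void movmskpsRef(uint8_t *pu8Dst, PCRTUINT128U puSrc)
 *     {
 *         *pu8Dst = (uint8_t)(   ((puSrc->au32[0] >> 31) & 1)
 *                             | (((puSrc->au32[1] >> 31) & 1) << 1)
 *                             | (((puSrc->au32[2] >> 31) & 1) << 2)
 *                             | (((puSrc->au32[3] >> 31) & 1) << 3));
 *     }
 *
 * movmskpd below is the two-lane double-precision analogue.
 */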
5491
5492/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5493FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5494{
5495 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5497 if (IEM_IS_MODRM_REG_MODE(bRm))
5498 {
5499 /*
5500 * Register, register.
5501 */
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503 IEM_MC_BEGIN(2, 1);
5504 IEM_MC_LOCAL(uint8_t, u8Dst);
5505 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5506 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5507 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5508 IEM_MC_PREPARE_SSE_USAGE();
5509 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5510 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5511 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5512 IEM_MC_ADVANCE_RIP_AND_FINISH();
5513 IEM_MC_END();
5514 }
5515 /* No memory operand. */
5516 else
5517 return IEMOP_RAISE_INVALID_OPCODE();
5519}
5520
5521
5522/* Opcode 0xf3 0x0f 0x50 - invalid */
5523/* Opcode 0xf2 0x0f 0x50 - invalid */
5524
5525
5526/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5527FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5528{
5529 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5531}
5532
5533
5534/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5535FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5536{
5537 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5538 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5539}
5540
5541
5542/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5543FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5544{
5545 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5546 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5547}
5548
5549
5550/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5551FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5552{
5553 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5554 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5555}
5556
5557
5558/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5559FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5560{
5561 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5562 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5563}
5564
5565
5566/* Opcode 0x66 0x0f 0x52 - invalid */
5567
5568
5569/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5570FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5571{
5572 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5573 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5574}
5575
5576
5577/* Opcode 0xf2 0x0f 0x52 - invalid */
5578
5579/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5580FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5581/* Opcode 0x66 0x0f 0x53 - invalid */
5582/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5583FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5584/* Opcode 0xf2 0x0f 0x53 - invalid */
5585
5586
5587/** Opcode 0x0f 0x54 - andps Vps, Wps */
5588FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5589{
5590 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5591 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5592}
5593
5594
5595/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5596FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5597{
5598 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5599 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5600}
5601
5602
5603/* Opcode 0xf3 0x0f 0x54 - invalid */
5604/* Opcode 0xf2 0x0f 0x54 - invalid */
5605
5606
5607/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5608FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5609{
5610 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5611 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5612}
5613
5614
5615/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5616FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5617{
5618 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5619 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5620}
5621
5622
5623/* Opcode 0xf3 0x0f 0x55 - invalid */
5624/* Opcode 0xf2 0x0f 0x55 - invalid */
5625
5626
5627/** Opcode 0x0f 0x56 - orps Vps, Wps */
5628FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5629{
5630 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5631 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5632}
5633
5634
5635/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5636FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5637{
5638 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5639 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5640}
5641
5642
5643/* Opcode 0xf3 0x0f 0x56 - invalid */
5644/* Opcode 0xf2 0x0f 0x56 - invalid */
5645
5646
5647/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5648FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5649{
5650 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5651 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5652}
5653
5654
5655/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5656FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5657{
5658 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5659 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5660}
5661
5662
5663/* Opcode 0xf3 0x0f 0x57 - invalid */
5664/* Opcode 0xf2 0x0f 0x57 - invalid */
5665
5666/** Opcode 0x0f 0x58 - addps Vps, Wps */
5667FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5668{
5669 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5670 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5671}
5672
5673
5674/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5675FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5676{
5677 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5678 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5679}
5680
5681
5682/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5683FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5684{
5685 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5686 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5687}
5688
5689
5690/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5691FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5692{
5693 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5694 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5695}
5696
5697
5698/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5699FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5700{
5701 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5702 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5703}
5704
5705
5706/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5707FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5708{
5709 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5710 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5711}
5712
5713
5714/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5715FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5716{
5717 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5718 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5719}
5720
5721
5722/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5723FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5724{
5725 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5726 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5727}
5728
5729
5730/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5731FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5732{
5733 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5734 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5735}
5736
5737
5738/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5739FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5740{
5741 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5742 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5743}
5744
5745
5746/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5747FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5748{
5749 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5750 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5751}
5752
5753
5754/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5755FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5756{
5757 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5758 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5759}
5760
5761
5762/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5763FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5764{
5765 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5766 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5767}
5768
5769
5770/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5771FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5772{
5773 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5774 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5775}
5776
5777
5778/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5779FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5780{
5781 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5782 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5783}
5784
5785
5786/* Opcode 0xf2 0x0f 0x5b - invalid */
5787
5788
5789/** Opcode 0x0f 0x5c - subps Vps, Wps */
5790FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5791{
5792 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5793 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5794}
5795
5796
5797/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5798FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5799{
5800 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5801 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5802}
5803
5804
5805/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5806FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5807{
5808 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5809 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5810}
5811
5812
5813/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5814FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5815{
5816 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5817 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5818}
5819
5820
5821/** Opcode 0x0f 0x5d - minps Vps, Wps */
5822FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5823{
5824 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5825 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5826}
5827
5828
5829/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5830FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5831{
5832 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5833 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5834}
5835
5836
5837/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5838FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5839{
5840 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5841 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5842}
5843
5844
5845/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5846FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5847{
5848 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5849 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5850}
5851
5852
5853/** Opcode 0x0f 0x5e - divps Vps, Wps */
5854FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5855{
5856 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5857 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5858}
5859
5860
5861/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5862FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5863{
5864 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5865 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5866}
5867
5868
5869/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5870FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5871{
5872 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5873 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5874}
5875
5876
5877/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5878FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5879{
5880 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5881 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5882}
5883
5884
5885/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5886FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5887{
5888 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5889 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5890}
5891
5892
5893/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5894FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5895{
5896 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5897 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5898}
5899
5900
5901/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5902FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5903{
5904 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5905 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5906}
5907
5908
5909/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5910FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5911{
5912 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5913 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5914}
5915
5916
5917/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5918FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5919{
5920 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5921 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5922}
5923
5924
5925/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5926FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5927{
5928 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5929 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5930}
5931
5932
5933/* Opcode 0xf3 0x0f 0x60 - invalid */
5934
5935
5936/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5937FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5938{
5939 /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
5940 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5941 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5942}
5943
5944
5945/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5946FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5947{
5948 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5949 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5950}
5951
5952
5953/* Opcode 0xf3 0x0f 0x61 - invalid */
5954
5955
5956/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5957FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5958{
5959 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5960 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5961}
5962
5963
5964/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5965FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5966{
5967 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5968 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5969}
5970
5971
5972/* Opcode 0xf3 0x0f 0x62 - invalid */
5973
5974
5975
5976/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5977FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5978{
5979 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5980 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5981}
5982
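/*
 * The pack* workers narrow each element with saturation rather than
 * truncation.  Per-element rule for packsswb, as a sketch (helper name
 * hypothetical):
 *
 *     static int8_t satS16toS8(int16_t i16)
 *     {
 *         return i16 > INT8_MAX ? INT8_MAX : i16 < INT8_MIN ? INT8_MIN : (int8_t)i16;
 *     }
 *
 * packuswb uses the unsigned range 0..255 instead, still from signed input.
 */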
5983
5984/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5985FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5986{
5987 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5988 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5989}
5990
5991
5992/* Opcode 0xf3 0x0f 0x63 - invalid */
5993
5994
5995/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5996FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5997{
5998 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5999 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6000}
6001
6002
6003/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6004FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6005{
6006 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6007 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6008}
6009
6010
6011/* Opcode 0xf3 0x0f 0x64 - invalid */
6012
6013
6014/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6015FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6016{
6017 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6018 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6019}
6020
6021
6022/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6023FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6024{
6025 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6026 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6027}
6028
6029
6030/* Opcode 0xf3 0x0f 0x65 - invalid */
6031
6032
6033/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6034FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6035{
6036 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6037 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6038}
6039
6040
6041/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6042FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6043{
6044 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6045 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6046}
6047
6048
6049/* Opcode 0xf3 0x0f 0x66 - invalid */
6050
6051
6052/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6053FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6054{
6055 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6056 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6057}
6058
6059
6060/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6061FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6062{
6063 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6064 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6065}
6066
6067
6068/* Opcode 0xf3 0x0f 0x67 - invalid */
6069
6070
6071/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6072 * @note Intel and AMD both use Qd for the second parameter, however they
6073 * both list it as an mmX/mem64 operand and Intel describes it as being
6074 * loaded as a qword, so it should be Qq, shouldn't it? */
6075FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6076{
6077 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6078 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6079}
6080
6081
6082/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6083FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6084{
6085 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6086 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6087}
6088
6089
6090/* Opcode 0xf3 0x0f 0x68 - invalid */
6091
6092
6093/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6094 * @note Intel and AMD both use Qd for the second parameter, however they
6095 * both list it as an mmX/mem64 operand and Intel describes it as being
6096 * loaded as a qword, so it should be Qq, shouldn't it? */
6097FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6098{
6099 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6100 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6101}
6102
6103
6104/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6105FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6106{
6107 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6108 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6110}
6111
6112
6113/* Opcode 0xf3 0x0f 0x69 - invalid */
6114
6115
6116/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6117 * @note Intel and AMD both use Qd for the second parameter, however they
6118 * both list it as an mmX/mem64 operand and Intel describes it as being
6119 * loaded as a qword, so it should be Qq, shouldn't it? */
6120FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6121{
6122 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6123 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6124}
6125
6126
6127/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6128FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6129{
6130 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6131 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6132}
6133
6134
6135/* Opcode 0xf3 0x0f 0x6a - invalid */
6136
6137
6138/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6139FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6140{
6141 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6142 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6143}
6144
6145
6146/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6147FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6148{
6149 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6150 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6151}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6b - invalid */
6155
6156
6157/* Opcode 0x0f 0x6c - invalid */
6158
6159
6160/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6161FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6162{
6163 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6164 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6165}
6166
6167
6168/* Opcode 0xf3 0x0f 0x6c - invalid */
6169/* Opcode 0xf2 0x0f 0x6c - invalid */
6170
6171
6172/* Opcode 0x0f 0x6d - invalid */
6173
6174
6175/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6176FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6177{
6178 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6179 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6180}
6181
6182
6183/* Opcode 0xf3 0x0f 0x6d - invalid */
6184
6185
6186FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6187{
6188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6189 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6190 {
6191 /**
6192 * @opcode 0x6e
6193 * @opcodesub rex.w=1
6194 * @oppfx none
6195 * @opcpuid mmx
6196 * @opgroup og_mmx_datamove
6197 * @opxcpttype 5
6198 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6199 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6200 */
6201 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6202 if (IEM_IS_MODRM_REG_MODE(bRm))
6203 {
6204 /* MMX, greg64 */
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_BEGIN(0, 1);
6207 IEM_MC_LOCAL(uint64_t, u64Tmp);
6208
6209 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6210 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6211 IEM_MC_FPU_TO_MMX_MODE();
6212
6213 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6214 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6215
6216 IEM_MC_ADVANCE_RIP_AND_FINISH();
6217 IEM_MC_END();
6218 }
6219 else
6220 {
6221 /* MMX, [mem64] */
6222 IEM_MC_BEGIN(0, 2);
6223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6224 IEM_MC_LOCAL(uint64_t, u64Tmp);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6228 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6229 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6230 IEM_MC_FPU_TO_MMX_MODE();
6231
6232 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6233 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6234
6235 IEM_MC_ADVANCE_RIP_AND_FINISH();
6236 IEM_MC_END();
6237 }
6238 }
6239 else
6240 {
6241 /**
6242 * @opdone
6243 * @opcode 0x6e
6244 * @opcodesub rex.w=0
6245 * @oppfx none
6246 * @opcpuid mmx
6247 * @opgroup og_mmx_datamove
6248 * @opxcpttype 5
6249 * @opfunction iemOp_movd_q_Pd_Ey
6250 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6251 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6252 */
6253 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6254 if (IEM_IS_MODRM_REG_MODE(bRm))
6255 {
6256 /* MMX, greg32 */
6257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6258 IEM_MC_BEGIN(0, 1);
6259 IEM_MC_LOCAL(uint32_t, u32Tmp);
6260
6261 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6262 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6263 IEM_MC_FPU_TO_MMX_MODE();
6264
6265 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6266 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6267
6268 IEM_MC_ADVANCE_RIP_AND_FINISH();
6269 IEM_MC_END();
6270 }
6271 else
6272 {
6273 /* MMX, [mem32] */
6274 IEM_MC_BEGIN(0, 2);
6275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6276 IEM_MC_LOCAL(uint32_t, u32Tmp);
6277
6278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6280 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6281 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6282 IEM_MC_FPU_TO_MMX_MODE();
6283
6284 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6285 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6286
6287 IEM_MC_ADVANCE_RIP_AND_FINISH();
6288 IEM_MC_END();
6289 }
6290 }
6291}
6292
6293FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6294{
6295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6296 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6297 {
6298 /**
6299 * @opcode 0x6e
6300 * @opcodesub rex.w=1
6301 * @oppfx 0x66
6302 * @opcpuid sse2
6303 * @opgroup og_sse2_simdint_datamove
6304 * @opxcpttype 5
6305 * @optest 64-bit / op1=1 op2=2 -> op1=2
6306 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6307 */
6308 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6309 if (IEM_IS_MODRM_REG_MODE(bRm))
6310 {
6311 /* XMM, greg64 */
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6313 IEM_MC_BEGIN(0, 1);
6314 IEM_MC_LOCAL(uint64_t, u64Tmp);
6315
6316 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6317 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6318
6319 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6320 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6321
6322 IEM_MC_ADVANCE_RIP_AND_FINISH();
6323 IEM_MC_END();
6324 }
6325 else
6326 {
6327 /* XMM, [mem64] */
6328 IEM_MC_BEGIN(0, 2);
6329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6330 IEM_MC_LOCAL(uint64_t, u64Tmp);
6331
6332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6334 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6336
6337 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6338 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6339
6340 IEM_MC_ADVANCE_RIP_AND_FINISH();
6341 IEM_MC_END();
6342 }
6343 }
6344 else
6345 {
6346 /**
6347 * @opdone
6348 * @opcode 0x6e
6349 * @opcodesub rex.w=0
6350 * @oppfx 0x66
6351 * @opcpuid sse2
6352 * @opgroup og_sse2_simdint_datamove
6353 * @opxcpttype 5
6354 * @opfunction iemOp_movd_q_Vy_Ey
6355 * @optest op1=1 op2=2 -> op1=2
6356 * @optest op1=0 op2=-42 -> op1=-42
6357 */
6358 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6359 if (IEM_IS_MODRM_REG_MODE(bRm))
6360 {
6361 /* XMM, greg32 */
6362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6363 IEM_MC_BEGIN(0, 1);
6364 IEM_MC_LOCAL(uint32_t, u32Tmp);
6365
6366 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6367 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6368
6369 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6370 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6371
6372 IEM_MC_ADVANCE_RIP_AND_FINISH();
6373 IEM_MC_END();
6374 }
6375 else
6376 {
6377 /* XMM, [mem32] */
6378 IEM_MC_BEGIN(0, 2);
6379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6380 IEM_MC_LOCAL(uint32_t, u32Tmp);
6381
6382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6385 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6386
6387 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6388 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6389
6390 IEM_MC_ADVANCE_RIP_AND_FINISH();
6391 IEM_MC_END();
6392 }
6393 }
6394}
6395
6396/* Opcode 0xf3 0x0f 0x6e - invalid */
6397
6398
6399/**
6400 * @opcode 0x6f
6401 * @oppfx none
6402 * @opcpuid mmx
6403 * @opgroup og_mmx_datamove
6404 * @opxcpttype 5
6405 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6406 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6407 */
6408FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6409{
6410 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6412 if (IEM_IS_MODRM_REG_MODE(bRm))
6413 {
6414 /*
6415 * Register, register.
6416 */
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6418 IEM_MC_BEGIN(0, 1);
6419 IEM_MC_LOCAL(uint64_t, u64Tmp);
6420
6421 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6422 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6423 IEM_MC_FPU_TO_MMX_MODE();
6424
6425 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6426 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6427
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 }
6431 else
6432 {
6433 /*
6434 * Register, memory.
6435 */
6436 IEM_MC_BEGIN(0, 2);
6437 IEM_MC_LOCAL(uint64_t, u64Tmp);
6438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6439
6440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6443 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6444 IEM_MC_FPU_TO_MMX_MODE();
6445
6446 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6447 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6448
6449 IEM_MC_ADVANCE_RIP_AND_FINISH();
6450 IEM_MC_END();
6451 }
6452}
6453
6454/**
6455 * @opcode 0x6f
6456 * @oppfx 0x66
6457 * @opcpuid sse2
6458 * @opgroup og_sse2_simdint_datamove
6459 * @opxcpttype 1
6460 * @optest op1=1 op2=2 -> op1=2
6461 * @optest op1=0 op2=-42 -> op1=-42
6462 */
6463FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6464{
6465 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6467 if (IEM_IS_MODRM_REG_MODE(bRm))
6468 {
6469 /*
6470 * Register, register.
6471 */
6472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6473 IEM_MC_BEGIN(0, 0);
6474
6475 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6477
6478 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6479 IEM_GET_MODRM_RM(pVCpu, bRm));
6480 IEM_MC_ADVANCE_RIP_AND_FINISH();
6481 IEM_MC_END();
6482 }
6483 else
6484 {
6485 /*
6486 * Register, memory.
6487 */
6488 IEM_MC_BEGIN(0, 2);
6489 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6491
6492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6494 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6496
6497 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6498 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6499
6500 IEM_MC_ADVANCE_RIP_AND_FINISH();
6501 IEM_MC_END();
6502 }
6503}
6504
6505/**
6506 * @opcode 0x6f
6507 * @oppfx 0xf3
6508 * @opcpuid sse2
6509 * @opgroup og_sse2_simdint_datamove
6510 * @opxcpttype 4UA
6511 * @optest op1=1 op2=2 -> op1=2
6512 * @optest op1=0 op2=-42 -> op1=-42
6513 */
6514FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6515{
6516 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6518 if (IEM_IS_MODRM_REG_MODE(bRm))
6519 {
6520 /*
6521 * Register, register.
6522 */
6523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6524 IEM_MC_BEGIN(0, 0);
6525 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6526 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6527 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6528 IEM_GET_MODRM_RM(pVCpu, bRm));
6529 IEM_MC_ADVANCE_RIP_AND_FINISH();
6530 IEM_MC_END();
6531 }
6532 else
6533 {
6534 /*
6535 * Register, memory.
6536 */
6537 IEM_MC_BEGIN(0, 2);
6538 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6540
6541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6543 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6545 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6546 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6547
6548 IEM_MC_ADVANCE_RIP_AND_FINISH();
6549 IEM_MC_END();
6550 }
6551}
6552
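/*
 * movdqa and movdqu above differ only in the memory fetch: the _ALIGN_SSE
 * variant used by movdqa raises #GP(0) for an effective address that is not
 * 16-byte aligned, conceptually:
 *
 *     if (GCPtrEffSrc & 15)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 *
 * whereas movdqu tolerates any alignment.
 */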
6553
6554/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6555FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6556{
6557 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6559 if (IEM_IS_MODRM_REG_MODE(bRm))
6560 {
6561 /*
6562 * Register, register.
6563 */
6564 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6566
6567 IEM_MC_BEGIN(3, 0);
6568 IEM_MC_ARG(uint64_t *, pDst, 0);
6569 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6570 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6571 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6572 IEM_MC_PREPARE_FPU_USAGE();
6573 IEM_MC_FPU_TO_MMX_MODE();
6574
6575 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6576 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6578 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6579
6580 IEM_MC_ADVANCE_RIP_AND_FINISH();
6581 IEM_MC_END();
6582 }
6583 else
6584 {
6585 /*
6586 * Register, memory.
6587 */
6588 IEM_MC_BEGIN(3, 2);
6589 IEM_MC_ARG(uint64_t *, pDst, 0);
6590 IEM_MC_LOCAL(uint64_t, uSrc);
6591 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6593
6594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6595 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6596 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6598 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6599 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6600
6601 IEM_MC_PREPARE_FPU_USAGE();
6602 IEM_MC_FPU_TO_MMX_MODE();
6603
6604 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6605 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6606 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6607
6608 IEM_MC_ADVANCE_RIP_AND_FINISH();
6609 IEM_MC_END();
6610 }
6611}
6612
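/*
 * For reference: pshufw selects each destination word via a 2-bit field of
 * the immediate.  Sketch of the semantics (helper name hypothetical):
 *
 *     static void pshufwRef(uint64_t *puDst, uint64_t const *puSrc, uint8_t bImm)
 *     {
 *         uint64_t const uSrc = *puSrc; // copy first, puDst may alias puSrc
 *         uint64_t       uDst = 0;
 *         for (unsigned i = 0; i < 4; i++)
 *             uDst |= ((uSrc >> (((bImm >> (i * 2)) & 3) * 16)) & UINT64_C(0xffff)) << (i * 16);
 *         *puDst = uDst;
 *     }
 */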
6613
6614/**
6615 * Common worker for SSE2 instructions on the forms:
6616 * pshufd xmm1, xmm2/mem128, imm8
6617 * pshufhw xmm1, xmm2/mem128, imm8
6618 * pshuflw xmm1, xmm2/mem128, imm8
6619 *
6620 * Proper alignment of the 128-bit operand is enforced.
6621 * Exceptions type 4. SSE2 cpuid checks.
6622 */
6623FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6624{
6625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6626 if (IEM_IS_MODRM_REG_MODE(bRm))
6627 {
6628 /*
6629 * Register, register.
6630 */
6631 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6633
6634 IEM_MC_BEGIN(3, 0);
6635 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6636 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6637 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6639 IEM_MC_PREPARE_SSE_USAGE();
6640 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6641 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6642 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6643 IEM_MC_ADVANCE_RIP_AND_FINISH();
6644 IEM_MC_END();
6645 }
6646 else
6647 {
6648 /*
6649 * Register, memory.
6650 */
6651 IEM_MC_BEGIN(3, 2);
6652 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6653 IEM_MC_LOCAL(RTUINT128U, uSrc);
6654 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6656
6657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6658 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6659 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6661 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6662
6663 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6664 IEM_MC_PREPARE_SSE_USAGE();
6665 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6666 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6667
6668 IEM_MC_ADVANCE_RIP_AND_FINISH();
6669 IEM_MC_END();
6670 }
6671}
6672
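/*
 * The three 128-bit shuffles below consume the immediate the same way, just
 * over different element groups: pshufd over all four dwords, pshufhw over
 * words 4-7 and pshuflw over words 0-3 (the other half is copied through).
 * pshufd sketch (helper name hypothetical):
 *
 *     static void pshufdRef(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
 *     {
 *         RTUINT128U const uSrc = *puSrc; // copy first, puDst may alias puSrc
 *         for (unsigned i = 0; i < 4; i++)
 *             puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
 *     }
 */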
6673
6674/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6675FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6676{
6677 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6678 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6679}
6680
6681
6682/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6683FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6684{
6685 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6686 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6687}
6688
6689
6690/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6691FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6692{
6693 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6694 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6695}
6696
6697
6698/**
6699 * Common worker for MMX instructions of the form:
6700 * psrlw mm, imm8
6701 * psraw mm, imm8
6702 * psllw mm, imm8
6703 * psrld mm, imm8
6704 * psrad mm, imm8
6705 * pslld mm, imm8
6706 * psrlq mm, imm8
6707 * psllq mm, imm8
6708 *
6709 */
6710FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6711{
6712 if (IEM_IS_MODRM_REG_MODE(bRm))
6713 {
6714 /*
6715 * Register, immediate.
6716 */
6717 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6719
6720 IEM_MC_BEGIN(2, 0);
6721 IEM_MC_ARG(uint64_t *, pDst, 0);
6722 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6723 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6724 IEM_MC_PREPARE_FPU_USAGE();
6725 IEM_MC_FPU_TO_MMX_MODE();
6726
6727 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6728 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6729 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6730
6731 IEM_MC_ADVANCE_RIP_AND_FINISH();
6732 IEM_MC_END();
6733 }
6734 else
6735 {
6736 /*
6737 * Register, memory not supported.
6738 */
6739 /// @todo Caller already enforced register mode?!
6740 AssertFailedReturn(VINF_SUCCESS);
6741 }
6742}
6743
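/*
 * Per-lane semantics of these immediate shifts, sketched for psrlw (helper
 * name hypothetical); shift counts >= the lane width zero every lane:
 *
 *     static void psrlwImmRef(uint64_t *puDst, uint8_t bShift)
 *     {
 *         if (bShift > 15)
 *             *puDst = 0;
 *         else
 *         {
 *             uint64_t const fLaneMask = (UINT64_C(0xffff) >> bShift) * UINT64_C(0x0001000100010001);
 *             *puDst = (*puDst >> bShift) & fLaneMask; // mask off bits leaking in from the lane above
 *         }
 *     }
 */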
6744
6745/**
6746 * Common worker for SSE2 instructions of the form:
6747 * psrlw xmm, imm8
6748 * psraw xmm, imm8
6749 * psllw xmm, imm8
6750 * psrld xmm, imm8
6751 * psrad xmm, imm8
6752 * pslld xmm, imm8
6753 * psrlq xmm, imm8
6754 * psllq xmm, imm8
6755 *
6756 */
6757FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6758{
6759 if (IEM_IS_MODRM_REG_MODE(bRm))
6760 {
6761 /*
6762 * Register, immediate.
6763 */
6764 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766
6767 IEM_MC_BEGIN(2, 0);
6768 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6769 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6770 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6771 IEM_MC_PREPARE_SSE_USAGE();
6772 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6773 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6774 IEM_MC_ADVANCE_RIP_AND_FINISH();
6775 IEM_MC_END();
6776 }
6777 else
6778 {
6779 /*
6780 * Register, memory not supported.
6781 */
6782 /// @todo Caller already enforced register mode?!
6783 AssertFailedReturn(VINF_SUCCESS);
6784 }
6785}
6786
6787
6788/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6789FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6790{
6791// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6792 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6793}
6794
6795
6796/** Opcode 0x66 0x0f 0x71 11/2. */
6797FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6798{
6799// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6800 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6801}
6802
6803
6804/** Opcode 0x0f 0x71 11/4. */
6805FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6806{
6807// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6808 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6809}
6810
6811
6812/** Opcode 0x66 0x0f 0x71 11/4. */
6813FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6814{
6815// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6816 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6817}
6818
6819
6820/** Opcode 0x0f 0x71 11/6. */
6821FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6822{
6823// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6824 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6825}
6826
6827
6828/** Opcode 0x66 0x0f 0x71 11/6. */
6829FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6830{
6831// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6832 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6833}
6834
6835
6836/**
6837 * Group 12 jump table for register variant.
6838 */
6839IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6840{
6841 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6843 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6844 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6845 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6846 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6847 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6848 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6849};
6850AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6851
6852
6853/** Opcode 0x0f 0x71. */
6854FNIEMOP_DEF(iemOp_Grp12)
6855{
6856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6857 if (IEM_IS_MODRM_REG_MODE(bRm))
6858 /* register, register */
6859 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6860 + pVCpu->iem.s.idxPrefix], bRm);
6861 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6862}
6863
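/*
 * The group 12/13/14 dispatchers flatten the 8 /r values and the 4 mandatory
 * prefixes (none, 0x66, 0xf3, 0xf2 - the pVCpu->iem.s.idxPrefix order) into
 * one 32-entry table, i.e. index = /r * 4 + prefix.  E.g. 66 0F 71 /2
 * (psrlw Ux, Ib) selects entry 2 * 4 + 1 = 9.
 */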
6864
6865/** Opcode 0x0f 0x72 11/2. */
6866FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6867{
6868// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6869 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6870}
6871
6872
6873/** Opcode 0x66 0x0f 0x72 11/2. */
6874FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6875{
6876// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6877 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6878}
6879
6880
6881/** Opcode 0x0f 0x72 11/4. */
6882FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6883{
6884// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6885 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6886}
6887
6888
6889/** Opcode 0x66 0x0f 0x72 11/4. */
6890FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6891{
6892// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6893 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6894}
6895
6896
6897/** Opcode 0x0f 0x72 11/6. */
6898FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6899{
6900// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6901 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6902}
6903
6904/** Opcode 0x66 0x0f 0x72 11/6. */
6905FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6906{
6907// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6908 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6909}
6910
6911
6912/**
6913 * Group 13 jump table for register variant.
6914 */
6915IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6916{
6917 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6918 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6919 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6920 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6921 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6922 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6923 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6924 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6925};
6926AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6927
6928/** Opcode 0x0f 0x72. */
6929FNIEMOP_DEF(iemOp_Grp13)
6930{
6931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6932 if (IEM_IS_MODRM_REG_MODE(bRm))
6933 /* register, register */
6934 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6935 + pVCpu->iem.s.idxPrefix], bRm);
6936 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6937}
6938
6939
6940/** Opcode 0x0f 0x73 11/2. */
6941FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6942{
6943// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6944 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6945}
6946
6947
6948/** Opcode 0x66 0x0f 0x73 11/2. */
6949FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6950{
6951// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6952 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6953}
6954
6955
6956/** Opcode 0x66 0x0f 0x73 11/3. */
6957FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6958{
6959// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6960 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6961}
6962
6963
6964/** Opcode 0x0f 0x73 11/6. */
6965FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6966{
6967// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6968 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6969}
6970
6971
6972/** Opcode 0x66 0x0f 0x73 11/6. */
6973FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6974{
6975// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6976 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6977}
6978
6979
6980/** Opcode 0x66 0x0f 0x73 11/7. */
6981FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6982{
6983// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6984 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6985}
6986
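/*
 * Unlike the other shifts in this group, psrldq/pslldq (/3 and /7, 0x66
 * prefix only) shift the whole 128-bit value by bytes.  psrldq sketch
 * (helper name hypothetical):
 *
 *     static void psrldqRef(PRTUINT128U puDst, uint8_t cbShift)
 *     {
 *         RTUINT128U const uSrc = *puDst;
 *         for (unsigned i = 0; i < 16; i++)
 *             puDst->au8[i] = i + cbShift < 16 ? uSrc.au8[i + cbShift] : 0;
 *     }
 */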
6987/**
6988 * Group 14 jump table for register variant.
6989 */
6990IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6991{
6992 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6993 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6994 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6995 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6996 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6997 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6998 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6999 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7000};
7001AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7002
7003
7004/** Opcode 0x0f 0x73. */
7005FNIEMOP_DEF(iemOp_Grp14)
7006{
7007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7008 if (IEM_IS_MODRM_REG_MODE(bRm))
7009 /* register, register */
7010 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7011 + pVCpu->iem.s.idxPrefix], bRm);
7012 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7013}
7014
7015
7016/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7017FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7018{
7019 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7020 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7021}
7022
7023
7024/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7025FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7026{
7027 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7028 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7029}
7030
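/*
 * The pcmpeq* (and pcmpgt* above) workers produce all-ones/all-zero lane
 * masks instead of setting flags.  Per-byte rule for pcmpeqb, as a sketch
 * (helper name hypothetical):
 *
 *     static uint8_t pcmpeqbLane(uint8_t uLhs, uint8_t uRhs)
 *     {
 *         return uLhs == uRhs ? 0xff : 0x00;
 *     }
 */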
7031
7032/* Opcode 0xf3 0x0f 0x74 - invalid */
7033/* Opcode 0xf2 0x0f 0x74 - invalid */
7034
7035
7036/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7037FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7038{
7039 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7040 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7041}
7042
7043
7044/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7045FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7046{
7047 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7048 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7049}
7050
7051
7052/* Opcode 0xf3 0x0f 0x75 - invalid */
7053/* Opcode 0xf2 0x0f 0x75 - invalid */
7054
7055
7056/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7057FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7058{
7059 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7060 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7061}
7062
7063
7064/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7065FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7066{
7067 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7068 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7069}
7070
7071
7072/* Opcode 0xf3 0x0f 0x76 - invalid */
7073/* Opcode 0xf2 0x0f 0x76 - invalid */
7074
7075
7076/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7077FNIEMOP_DEF(iemOp_emms)
7078{
7079 IEMOP_MNEMONIC(emms, "emms");
7080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7081
7082 IEM_MC_BEGIN(0, 0);
7083 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7084 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7085 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7086 IEM_MC_FPU_FROM_MMX_MODE();
7087 IEM_MC_ADVANCE_RIP_AND_FINISH();
7088 IEM_MC_END();
7089}
7090
7091/* Opcode 0x66 0x0f 0x77 - invalid */
7092/* Opcode 0xf3 0x0f 0x77 - invalid */
7093/* Opcode 0xf2 0x0f 0x77 - invalid */
7094
7095/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7096#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7097FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7098{
7099 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7100 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7101 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7102 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */


/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
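            /* Like all MMX instructions, this one puts the FPU in MMX mode
               (TOP cleared, tag word all valid) even though it only reads
               the MMX register. */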
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Ey_Pd
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Ey_Vy
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

/**
 * @opcode 0x7e
 * @oppfx 0xf3
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

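        /* MOVQ always zero-extends the qword into bits 127:64 of the
           destination, hence the _ZX_U128 store below. */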
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

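/* Note: movdqu below differs from movdqa only in the memory path: the
   unaligned store (IEM_MC_STORE_MEM_U128) skips the 16-byte alignment
   check that the aligned variant enforces. */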
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
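    /* Note: in 64-bit mode near branches default to a 64-bit operand size.
       The long helper name records a vendor difference: Intel CPUs ignore
       a 66h prefix on these jumps, whereas AMD CPUs honour it and truncate
       the new RIP to 16 bits. */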
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
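    /* Unsigned 'below or equal': taken when either CF or ZF is set. */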
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
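    /* Signed 'less': taken when SF != OF; jle below additionally tests ZF. */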
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


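/*
 * The SETcc instructions below all follow the same pattern: a single byte
 * (1 or 0) is written to the r/m operand depending on an EFLAGS condition,
 * mirroring the conditions of the Jcc instructions above.
 */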
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
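            /* Note: recent CPUs doing a 32-bit push of a segment register
               only write the low 16 bits and leave the upper half of the
               stack slot untouched (see the Intel SDM on PUSH), which is
               presumably why the dedicated _SREG push exists. */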
8950 IEM_MC_PUSH_U32_SREG(u32Value);
8951 IEM_MC_ADVANCE_RIP_AND_FINISH();
8952 IEM_MC_END();
8953 break;
8954
8955 case IEMMODE_64BIT:
8956 IEM_MC_BEGIN(0, 1);
8957 IEM_MC_LOCAL(uint64_t, u64Value);
8958 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8959 IEM_MC_PUSH_U64(u64Value);
8960 IEM_MC_ADVANCE_RIP_AND_FINISH();
8961 IEM_MC_END();
8962 break;
8963
8964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8965 }
8966}
8967
8968
8969/** Opcode 0x0f 0xa0. */
8970FNIEMOP_DEF(iemOp_push_fs)
8971{
8972 IEMOP_MNEMONIC(push_fs, "push fs");
8973 IEMOP_HLP_MIN_386();
8974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8975 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8976}
8977
8978
8979/** Opcode 0x0f 0xa1. */
8980FNIEMOP_DEF(iemOp_pop_fs)
8981{
8982 IEMOP_MNEMONIC(pop_fs, "pop fs");
8983 IEMOP_HLP_MIN_386();
8984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8985 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8986}
8987
8988
8989/** Opcode 0x0f 0xa2. */
8990FNIEMOP_DEF(iemOp_cpuid)
8991{
8992 IEMOP_MNEMONIC(cpuid, "cpuid");
8993 IEMOP_HLP_MIN_486(); /* not all 486es. */
8994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8995 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8996}
8997
8998
8999/**
9000 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9001 * iemOp_bts_Ev_Gv.
9002 */
9003FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
9004{
9005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9006 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9007
9008 if (IEM_IS_MODRM_REG_MODE(bRm))
9009 {
9010 /* register destination. */
9011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9012 switch (pVCpu->iem.s.enmEffOpSize)
9013 {
9014 case IEMMODE_16BIT:
9015 IEM_MC_BEGIN(3, 0);
9016 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9017 IEM_MC_ARG(uint16_t, u16Src, 1);
9018 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9019
9020 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9021 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
9022 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9023 IEM_MC_REF_EFLAGS(pEFlags);
9024 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9025
9026 IEM_MC_ADVANCE_RIP_AND_FINISH();
9027 IEM_MC_END();
9028 break;
9029
9030 case IEMMODE_32BIT:
9031 IEM_MC_BEGIN(3, 0);
9032 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9033 IEM_MC_ARG(uint32_t, u32Src, 1);
9034 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9035
9036 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9037 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
9038 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9039 IEM_MC_REF_EFLAGS(pEFlags);
9040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9041
9042 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9043 IEM_MC_ADVANCE_RIP_AND_FINISH();
9044 IEM_MC_END();
9045 break;
9046
9047 case IEMMODE_64BIT:
9048 IEM_MC_BEGIN(3, 0);
9049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9050 IEM_MC_ARG(uint64_t, u64Src, 1);
9051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9052
9053 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9054 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
9055 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9056 IEM_MC_REF_EFLAGS(pEFlags);
9057 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9058
9059 IEM_MC_ADVANCE_RIP_AND_FINISH();
9060 IEM_MC_END();
9061 break;
9062
9063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9064 }
9065 }
9066 else
9067 {
9068 /* memory destination. */
9069
9070 uint32_t fAccess;
9071 if (pImpl->pfnLockedU16)
9072 fAccess = IEM_ACCESS_DATA_RW;
9073 else /* BT */
9074 fAccess = IEM_ACCESS_DATA_R;
9075
9076 /** @todo test negative bit offsets! */
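/*
 * For a memory destination the bit offset in Gv is a signed value that is
 * not limited to the operand width.  The SAR+SHL pairs below compute the
 * byte displacement of the width-aligned chunk containing the bit
 * (offset >> log2(width), scaled to bytes), while the AND masks reduce
 * the offset to the bit index within that chunk.
 */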
9077 switch (pVCpu->iem.s.enmEffOpSize)
9078 {
9079 case IEMMODE_16BIT:
9080 IEM_MC_BEGIN(3, 2);
9081 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9082 IEM_MC_ARG(uint16_t, u16Src, 1);
9083 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9085 IEM_MC_LOCAL(int16_t, i16AddrAdj);
9086
9087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9088 if (pImpl->pfnLockedU16)
9089 IEMOP_HLP_DONE_DECODING();
9090 else
9091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9092 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9093 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9094 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
9095 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
9096 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
9097 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9098 IEM_MC_FETCH_EFLAGS(EFlags);
9099
9100 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9103 else
9104 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9105 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9106
9107 IEM_MC_COMMIT_EFLAGS(EFlags);
9108 IEM_MC_ADVANCE_RIP_AND_FINISH();
9109 IEM_MC_END();
9110 break;
9111
9112 case IEMMODE_32BIT:
9113 IEM_MC_BEGIN(3, 2);
9114 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9115 IEM_MC_ARG(uint32_t, u32Src, 1);
9116 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9118 IEM_MC_LOCAL(int32_t, i32AddrAdj);
9119
9120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9121 if (pImpl->pfnLockedU16)
9122 IEMOP_HLP_DONE_DECODING();
9123 else
9124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9125 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9126 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9127 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
9128 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
9129 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
9130 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9131 IEM_MC_FETCH_EFLAGS(EFlags);
9132
9133 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9136 else
9137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9138 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9139
9140 IEM_MC_COMMIT_EFLAGS(EFlags);
9141 IEM_MC_ADVANCE_RIP_AND_FINISH();
9142 IEM_MC_END();
9143 break;
9144
9145 case IEMMODE_64BIT:
9146 IEM_MC_BEGIN(3, 2);
9147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9148 IEM_MC_ARG(uint64_t, u64Src, 1);
9149 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9151 IEM_MC_LOCAL(int64_t, i64AddrAdj);
9152
9153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9154 if (pImpl->pfnLockedU16)
9155 IEMOP_HLP_DONE_DECODING();
9156 else
9157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9158 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9159 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9160 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
9161 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
9162 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
9163 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9164 IEM_MC_FETCH_EFLAGS(EFlags);
9165
9166 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9169 else
9170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9171 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9172
9173 IEM_MC_COMMIT_EFLAGS(EFlags);
9174 IEM_MC_ADVANCE_RIP_AND_FINISH();
9175 IEM_MC_END();
9176 break;
9177
9178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9179 }
9180 }
9181}
9182
9183
9184/** Opcode 0x0f 0xa3. */
9185FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9186{
9187 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9188 IEMOP_HLP_MIN_386();
9189 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9190}
9191
9192
9193/**
9194 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9195 */
9196FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9197{
9198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9199 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9200
9201 if (IEM_IS_MODRM_REG_MODE(bRm))
9202 {
9203 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9205
9206 switch (pVCpu->iem.s.enmEffOpSize)
9207 {
9208 case IEMMODE_16BIT:
9209 IEM_MC_BEGIN(4, 0);
9210 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9211 IEM_MC_ARG(uint16_t, u16Src, 1);
9212 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9213 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9214
9215 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9216 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9217 IEM_MC_REF_EFLAGS(pEFlags);
9218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9219
9220 IEM_MC_ADVANCE_RIP_AND_FINISH();
9221 IEM_MC_END();
9222 break;
9223
9224 case IEMMODE_32BIT:
9225 IEM_MC_BEGIN(4, 0);
9226 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9227 IEM_MC_ARG(uint32_t, u32Src, 1);
9228 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9229 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9230
9231 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9232 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9233 IEM_MC_REF_EFLAGS(pEFlags);
9234 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9235
9236 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9237 IEM_MC_ADVANCE_RIP_AND_FINISH();
9238 IEM_MC_END();
9239 break;
9240
9241 case IEMMODE_64BIT:
9242 IEM_MC_BEGIN(4, 0);
9243 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9244 IEM_MC_ARG(uint64_t, u64Src, 1);
9245 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9246 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9247
9248 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9249 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9250 IEM_MC_REF_EFLAGS(pEFlags);
9251 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9252
9253 IEM_MC_ADVANCE_RIP_AND_FINISH();
9254 IEM_MC_END();
9255 break;
9256
9257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9258 }
9259 }
9260 else
9261 {
9262 switch (pVCpu->iem.s.enmEffOpSize)
9263 {
9264 case IEMMODE_16BIT:
9265 IEM_MC_BEGIN(4, 2);
9266 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9267 IEM_MC_ARG(uint16_t, u16Src, 1);
9268 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9269 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9271
9272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9273 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9274 IEM_MC_ASSIGN(cShiftArg, cShift);
9275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9276 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9277 IEM_MC_FETCH_EFLAGS(EFlags);
9278 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9279 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9280
9281 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9282 IEM_MC_COMMIT_EFLAGS(EFlags);
9283 IEM_MC_ADVANCE_RIP_AND_FINISH();
9284 IEM_MC_END();
9285 break;
9286
9287 case IEMMODE_32BIT:
9288 IEM_MC_BEGIN(4, 2);
9289 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9290 IEM_MC_ARG(uint32_t, u32Src, 1);
9291 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9292 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9294
9295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9296 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9297 IEM_MC_ASSIGN(cShiftArg, cShift);
9298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9299 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9300 IEM_MC_FETCH_EFLAGS(EFlags);
9301 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9302 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9303
9304 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9305 IEM_MC_COMMIT_EFLAGS(EFlags);
9306 IEM_MC_ADVANCE_RIP_AND_FINISH();
9307 IEM_MC_END();
9308 break;
9309
9310 case IEMMODE_64BIT:
9311 IEM_MC_BEGIN(4, 2);
9312 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9313 IEM_MC_ARG(uint64_t, u64Src, 1);
9314 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9315 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9317
9318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9319 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9320 IEM_MC_ASSIGN(cShiftArg, cShift);
9321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9322 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9323 IEM_MC_FETCH_EFLAGS(EFlags);
9324 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9325 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9326
9327 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9328 IEM_MC_COMMIT_EFLAGS(EFlags);
9329 IEM_MC_ADVANCE_RIP_AND_FINISH();
9330 IEM_MC_END();
9331 break;
9332
9333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9334 }
9335 }
9336}
9337
9338
9339/**
9340 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9341 */
9342FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9343{
9344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9345 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9346
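/* The shift count is read from CL; count masking and out-of-range
   handling are left to the AIMPL workers. */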
9347 if (IEM_IS_MODRM_REG_MODE(bRm))
9348 {
9349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9350
9351 switch (pVCpu->iem.s.enmEffOpSize)
9352 {
9353 case IEMMODE_16BIT:
9354 IEM_MC_BEGIN(4, 0);
9355 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9356 IEM_MC_ARG(uint16_t, u16Src, 1);
9357 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9358 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9359
9360 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9361 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9362 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9363 IEM_MC_REF_EFLAGS(pEFlags);
9364 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9365
9366 IEM_MC_ADVANCE_RIP_AND_FINISH();
9367 IEM_MC_END();
9368 break;
9369
9370 case IEMMODE_32BIT:
9371 IEM_MC_BEGIN(4, 0);
9372 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9373 IEM_MC_ARG(uint32_t, u32Src, 1);
9374 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9375 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9376
9377 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9378 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9379 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9380 IEM_MC_REF_EFLAGS(pEFlags);
9381 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9382
9383 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9384 IEM_MC_ADVANCE_RIP_AND_FINISH();
9385 IEM_MC_END();
9386 break;
9387
9388 case IEMMODE_64BIT:
9389 IEM_MC_BEGIN(4, 0);
9390 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9391 IEM_MC_ARG(uint64_t, u64Src, 1);
9392 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9393 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9394
9395 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9396 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9397 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9398 IEM_MC_REF_EFLAGS(pEFlags);
9399 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9400
9401 IEM_MC_ADVANCE_RIP_AND_FINISH();
9402 IEM_MC_END();
9403 break;
9404
9405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9406 }
9407 }
9408 else
9409 {
9410 switch (pVCpu->iem.s.enmEffOpSize)
9411 {
9412 case IEMMODE_16BIT:
9413 IEM_MC_BEGIN(4, 2);
9414 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9415 IEM_MC_ARG(uint16_t, u16Src, 1);
9416 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9417 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9419
9420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9422 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9423 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9424 IEM_MC_FETCH_EFLAGS(EFlags);
9425 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9426 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9427
9428 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9429 IEM_MC_COMMIT_EFLAGS(EFlags);
9430 IEM_MC_ADVANCE_RIP_AND_FINISH();
9431 IEM_MC_END();
9432 break;
9433
9434 case IEMMODE_32BIT:
9435 IEM_MC_BEGIN(4, 2);
9436 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9437 IEM_MC_ARG(uint32_t, u32Src, 1);
9438 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9439 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9441
9442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9444 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9445 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9446 IEM_MC_FETCH_EFLAGS(EFlags);
9447 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9448 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9449
9450 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9451 IEM_MC_COMMIT_EFLAGS(EFlags);
9452 IEM_MC_ADVANCE_RIP_AND_FINISH();
9453 IEM_MC_END();
9454 break;
9455
9456 case IEMMODE_64BIT:
9457 IEM_MC_BEGIN(4, 2);
9458 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9459 IEM_MC_ARG(uint64_t, u64Src, 1);
9460 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9461 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9463
9464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9466 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9467 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9468 IEM_MC_FETCH_EFLAGS(EFlags);
9469 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9470 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9471
9472 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9473 IEM_MC_COMMIT_EFLAGS(EFlags);
9474 IEM_MC_ADVANCE_RIP_AND_FINISH();
9475 IEM_MC_END();
9476 break;
9477
9478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9479 }
9480 }
9481}
9482
9483
9484
9485/** Opcode 0x0f 0xa4. */
9486FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9487{
9488 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9489 IEMOP_HLP_MIN_386();
9490 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9491}
9492
9493
9494/** Opcode 0x0f 0xa5. */
9495FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9496{
9497 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9498 IEMOP_HLP_MIN_386();
9499 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9500}
9501
9502
9503/** Opcode 0x0f 0xa8. */
9504FNIEMOP_DEF(iemOp_push_gs)
9505{
9506 IEMOP_MNEMONIC(push_gs, "push gs");
9507 IEMOP_HLP_MIN_386();
9508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9509 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9510}
9511
9512
9513/** Opcode 0x0f 0xa9. */
9514FNIEMOP_DEF(iemOp_pop_gs)
9515{
9516 IEMOP_MNEMONIC(pop_gs, "pop gs");
9517 IEMOP_HLP_MIN_386();
9518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9519 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9520}
9521
9522
9523/** Opcode 0x0f 0xaa. */
9524FNIEMOP_DEF(iemOp_rsm)
9525{
9526 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9527 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9529 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9530}
9531
9532
9533
9534/** Opcode 0x0f 0xab. */
9535FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9536{
9537 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9538 IEMOP_HLP_MIN_386();
9539 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9540}
9541
9542
9543/** Opcode 0x0f 0xac. */
9544FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9545{
9546 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9547 IEMOP_HLP_MIN_386();
9548 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9549}
9550
9551
9552/** Opcode 0x0f 0xad. */
9553FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9554{
9555 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9556 IEMOP_HLP_MIN_386();
9557 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9558}
9559
9560
9561/** Opcode 0x0f 0xae mem/0. */
9562FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9563{
9564 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9565 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9566 return IEMOP_RAISE_INVALID_OPCODE();
9567
9568 IEM_MC_BEGIN(3, 1);
9569 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9570 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9571 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
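/* FXSAVE only reads the guest FPU/SSE state, so actualizing it for read
   access suffices; FXRSTOR below replaces the state and therefore
   actualizes for change. */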
9574 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9575 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9576 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9577 IEM_MC_END();
9578 return VINF_SUCCESS;
9579}
9580
9581
9582/** Opcode 0x0f 0xae mem/1. */
9583FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9584{
9585 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9586 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9587 return IEMOP_RAISE_INVALID_OPCODE();
9588
9589 IEM_MC_BEGIN(3, 1);
9590 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9591 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9592 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9595 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9596 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9597 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9598 IEM_MC_END();
9599 return VINF_SUCCESS;
9600}
9601
9602
9603/**
9604 * @opmaps grp15
9605 * @opcode !11/2
9606 * @oppfx none
9607 * @opcpuid sse
9608 * @opgroup og_sse_mxcsrsm
9609 * @opxcpttype 5
9610 * @optest op1=0 -> mxcsr=0
9611 * @optest op1=0x2083 -> mxcsr=0x2083
9612 * @optest op1=0xfffffffe -> value.xcpt=0xd
9613 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9614 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9615 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9616 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9617 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9618 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9619 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9620 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9621 */
9622FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9623{
9624 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9625 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9626 return IEMOP_RAISE_INVALID_OPCODE();
9627
9628 IEM_MC_BEGIN(2, 0);
9629 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9630 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9634 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9635 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9636 IEM_MC_END();
9637 return VINF_SUCCESS;
9638}
9639
9640
9641/**
9642 * @opmaps grp15
9643 * @opcode !11/3
9644 * @oppfx none
9645 * @opcpuid sse
9646 * @opgroup og_sse_mxcsrsm
9647 * @opxcpttype 5
9648 * @optest mxcsr=0 -> op1=0
9649 * @optest mxcsr=0x2083 -> op1=0x2083
9650 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9651 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9652 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9653 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9654 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9655 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9656 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9657 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9658 */
9659FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9660{
9661 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9662 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9663 return IEMOP_RAISE_INVALID_OPCODE();
9664
9665 IEM_MC_BEGIN(2, 0);
9666 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9667 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9670 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9671 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9672 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9673 IEM_MC_END();
9674 return VINF_SUCCESS;
9675}
9676
9677
9678/**
9679 * @opmaps grp15
9680 * @opcode !11/4
9681 * @oppfx none
9682 * @opcpuid xsave
9683 * @opgroup og_system
9684 * @opxcpttype none
9685 */
9686FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9687{
9688 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9689 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9690 return IEMOP_RAISE_INVALID_OPCODE();
9691
9692 IEM_MC_BEGIN(3, 0);
9693 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9694 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9695 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9698 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9699 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9700 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9701 IEM_MC_END();
9702 return VINF_SUCCESS;
9703}
9704
9705
9706/**
9707 * @opmaps grp15
9708 * @opcode !11/5
9709 * @oppfx none
9710 * @opcpuid xsave
9711 * @opgroup og_system
9712 * @opxcpttype none
9713 */
9714FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9715{
9716 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9717 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9718 return IEMOP_RAISE_INVALID_OPCODE();
9719
9720 IEM_MC_BEGIN(3, 0);
9721 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9722 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9723 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9726 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9727 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9728 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9729 IEM_MC_END();
9730 return VINF_SUCCESS;
9731}
9732
9733/** Opcode 0x0f 0xae mem/6. */
9734FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9735
9736/**
9737 * @opmaps grp15
9738 * @opcode !11/7
9739 * @oppfx none
9740 * @opcpuid clfsh
9741 * @opgroup og_cachectl
9742 * @optest op1=1 ->
9743 */
9744FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9745{
9746 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9747 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9748 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9749
9750 IEM_MC_BEGIN(2, 0);
9751 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9752 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9755 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9756 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9757 IEM_MC_END();
9758 return VINF_SUCCESS;
9759}
9760
9761/**
9762 * @opmaps grp15
9763 * @opcode !11/7
9764 * @oppfx 0x66
9765 * @opcpuid clflushopt
9766 * @opgroup og_cachectl
9767 * @optest op1=1 ->
9768 */
9769FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9770{
9771 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9772 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9773 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9774
9775 IEM_MC_BEGIN(2, 0);
9776 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9777 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9780 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9781 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9782 IEM_MC_END();
9783 return VINF_SUCCESS;
9784}
9785
9786
9787/** Opcode 0x0f 0xae 11b/5. */
9788FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9789{
9790 RT_NOREF_PV(bRm);
9791 IEMOP_MNEMONIC(lfence, "lfence");
9792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9793 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9794 return IEMOP_RAISE_INVALID_OPCODE();
9795
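/* On x86 hosts the native lfence worker is only usable when the host
   itself has SSE2; otherwise a generic memory fence stands in for it.
   On ARM64 the check is compiled out and the native worker (presumably
   built on ARM barriers) is always used.  The same pattern recurs in
   mfence and sfence below. */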
9796 IEM_MC_BEGIN(0, 0);
9797#ifndef RT_ARCH_ARM64
9798 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9799#endif
9800 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9801#ifndef RT_ARCH_ARM64
9802 else
9803 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9804#endif
9805 IEM_MC_ADVANCE_RIP_AND_FINISH();
9806 IEM_MC_END();
9807}
9808
9809
9810/** Opcode 0x0f 0xae 11b/6. */
9811FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9812{
9813 RT_NOREF_PV(bRm);
9814 IEMOP_MNEMONIC(mfence, "mfence");
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9817 return IEMOP_RAISE_INVALID_OPCODE();
9818
9819 IEM_MC_BEGIN(0, 0);
9820#ifndef RT_ARCH_ARM64
9821 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9822#endif
9823 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9824#ifndef RT_ARCH_ARM64
9825 else
9826 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9827#endif
9828 IEM_MC_ADVANCE_RIP_AND_FINISH();
9829 IEM_MC_END();
9830}
9831
9832
9833/** Opcode 0x0f 0xae 11b/7. */
9834FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9835{
9836 RT_NOREF_PV(bRm);
9837 IEMOP_MNEMONIC(sfence, "sfence");
9838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9839 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9840 return IEMOP_RAISE_INVALID_OPCODE();
9841
9842 IEM_MC_BEGIN(0, 0);
9843#ifndef RT_ARCH_ARM64
9844 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9845#endif
9846 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9847#ifndef RT_ARCH_ARM64
9848 else
9849 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9850#endif
9851 IEM_MC_ADVANCE_RIP_AND_FINISH();
9852 IEM_MC_END();
9853}
9854
9855
9856/** Opcode 0xf3 0x0f 0xae 11b/0. */
9857FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9858{
9859 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9861 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9862 {
9863 IEM_MC_BEGIN(1, 0);
9864 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9865 IEM_MC_ARG(uint64_t, u64Dst, 0);
9866 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9867 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9868 IEM_MC_ADVANCE_RIP_AND_FINISH();
9869 IEM_MC_END();
9870 }
9871 else
9872 {
9873 IEM_MC_BEGIN(1, 0);
9874 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9875 IEM_MC_ARG(uint32_t, u32Dst, 0);
9876 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9877 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9878 IEM_MC_ADVANCE_RIP_AND_FINISH();
9879 IEM_MC_END();
9880 }
9881}
9882
9883
9884/** Opcode 0xf3 0x0f 0xae 11b/1. */
9885FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9886{
9887 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9889 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9890 {
9891 IEM_MC_BEGIN(1, 0);
9892 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9893 IEM_MC_ARG(uint64_t, u64Dst, 0);
9894 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9895 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9896 IEM_MC_ADVANCE_RIP_AND_FINISH();
9897 IEM_MC_END();
9898 }
9899 else
9900 {
9901 IEM_MC_BEGIN(1, 0);
9902 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9903 IEM_MC_ARG(uint32_t, u32Dst, 0);
9904 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9905 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9906 IEM_MC_ADVANCE_RIP_AND_FINISH();
9907 IEM_MC_END();
9908 }
9909}
9910
9911
9912/** Opcode 0xf3 0x0f 0xae 11b/2. */
9913FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9914{
9915 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
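/* A 64-bit base must be canonical or #GP(0) is raised, hence the
   non-canonical check in the 64-bit path; the 32-bit form zero-extends
   into the base and thus can never be non-canonical. */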
9917 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9918 {
9919 IEM_MC_BEGIN(1, 0);
9920 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9921 IEM_MC_ARG(uint64_t, u64Dst, 0);
9922 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9923 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9924 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9925 IEM_MC_ADVANCE_RIP_AND_FINISH();
9926 IEM_MC_END();
9927 }
9928 else
9929 {
9930 IEM_MC_BEGIN(1, 0);
9931 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9932 IEM_MC_ARG(uint32_t, u32Dst, 0);
9933 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9934 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9935 IEM_MC_ADVANCE_RIP_AND_FINISH();
9936 IEM_MC_END();
9937 }
9938}
9939
9940
9941/** Opcode 0xf3 0x0f 0xae 11b/3. */
9942FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9943{
9944 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9946 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9947 {
9948 IEM_MC_BEGIN(1, 0);
9949 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9950 IEM_MC_ARG(uint64_t, u64Dst, 0);
9951 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9952 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9953 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9954 IEM_MC_ADVANCE_RIP_AND_FINISH();
9955 IEM_MC_END();
9956 }
9957 else
9958 {
9959 IEM_MC_BEGIN(1, 0);
9960 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9961 IEM_MC_ARG(uint32_t, u32Dst, 0);
9962 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9963 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9964 IEM_MC_ADVANCE_RIP_AND_FINISH();
9965 IEM_MC_END();
9966 }
9967}
9968
9969
9970/**
9971 * Group 15 jump table for register variant.
9972 */
9973IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9974{ /* pfx: none, 066h, 0f3h, 0f2h */
9975 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9976 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9977 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9978 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9979 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9980 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9981 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9982 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9983};
9984AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9985
9986
9987/**
9988 * Group 15 jump table for memory variant.
9989 */
9990IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9991{ /* pfx: none, 066h, 0f3h, 0f2h */
9992 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9993 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9994 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9995 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9996 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9997 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9998 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9999 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10000};
10001AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10002
10003
10004/** Opcode 0x0f 0xae. */
10005FNIEMOP_DEF(iemOp_Grp15)
10006{
10007 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
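/* Each row of the tables above holds the four prefix variants (none,
   066h, 0f3h, 0f2h) for one /r value, hence the reg * 4 + prefix
   indexing below. */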
10009 if (IEM_IS_MODRM_REG_MODE(bRm))
10010 /* register, register */
10011 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10012 + pVCpu->iem.s.idxPrefix], bRm);
10013 /* memory, register */
10014 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10015 + pVCpu->iem.s.idxPrefix], bRm);
10016}
10017
10018
10019/** Opcode 0x0f 0xaf. */
10020FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10021{
10022 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10023 IEMOP_HLP_MIN_386();
10024 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10025 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10026}
10027
10028
10029/** Opcode 0x0f 0xb0. */
10030FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10031{
10032 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10033 IEMOP_HLP_MIN_486();
10034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10035
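/* CMPXCHG: if the accumulator equals the destination, ZF is set and the
   source operand is stored in the destination; otherwise ZF is cleared
   and the destination value is loaded into the accumulator.  The AIMPL
   workers perform the compare and both conditional stores. */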
10036 if (IEM_IS_MODRM_REG_MODE(bRm))
10037 {
10038 IEMOP_HLP_DONE_DECODING();
10039 IEM_MC_BEGIN(4, 0);
10040 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10041 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10042 IEM_MC_ARG(uint8_t, u8Src, 2);
10043 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10044
10045 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10046 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10047 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10048 IEM_MC_REF_EFLAGS(pEFlags);
10049 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10050 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10051 else
10052 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10053
10054 IEM_MC_ADVANCE_RIP_AND_FINISH();
10055 IEM_MC_END();
10056 }
10057 else
10058 {
10059 IEM_MC_BEGIN(4, 3);
10060 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10061 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10062 IEM_MC_ARG(uint8_t, u8Src, 2);
10063 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10065 IEM_MC_LOCAL(uint8_t, u8Al);
10066
10067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10068 IEMOP_HLP_DONE_DECODING();
10069 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10070 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10071 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10072 IEM_MC_FETCH_EFLAGS(EFlags);
10073 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10074 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10075 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10076 else
10077 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10078
10079 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10080 IEM_MC_COMMIT_EFLAGS(EFlags);
10081 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10082 IEM_MC_ADVANCE_RIP_AND_FINISH();
10083 IEM_MC_END();
10084 }
10085}
10086
10087/** Opcode 0x0f 0xb1. */
10088FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10089{
10090 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10091 IEMOP_HLP_MIN_486();
10092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10093
10094 if (IEM_IS_MODRM_REG_MODE(bRm))
10095 {
10096 IEMOP_HLP_DONE_DECODING();
10097 switch (pVCpu->iem.s.enmEffOpSize)
10098 {
10099 case IEMMODE_16BIT:
10100 IEM_MC_BEGIN(4, 0);
10101 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10102 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10103 IEM_MC_ARG(uint16_t, u16Src, 2);
10104 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10105
10106 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10107 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10108 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10109 IEM_MC_REF_EFLAGS(pEFlags);
10110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10111 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10112 else
10113 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10114
10115 IEM_MC_ADVANCE_RIP_AND_FINISH();
10116 IEM_MC_END();
10117 break;
10118
10119 case IEMMODE_32BIT:
10120 IEM_MC_BEGIN(4, 0);
10121 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10122 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10123 IEM_MC_ARG(uint32_t, u32Src, 2);
10124 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10125
10126 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10127 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10128 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10129 IEM_MC_REF_EFLAGS(pEFlags);
10130 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10131 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10132 else
10133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10134
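/* In 64-bit mode a 32-bit GPR write clears bits 63:32.  CMPXCHG only
   writes one of the two registers (the destination on success, EAX on
   failure), so clear the high half of just the one written. */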
10135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10137 } IEM_MC_ELSE() {
10138 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10139 } IEM_MC_ENDIF();
10140
10141 IEM_MC_ADVANCE_RIP_AND_FINISH();
10142 IEM_MC_END();
10143 break;
10144
10145 case IEMMODE_64BIT:
10146 IEM_MC_BEGIN(4, 0);
10147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10148 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10149#ifdef RT_ARCH_X86
10150 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10151#else
10152 IEM_MC_ARG(uint64_t, u64Src, 2);
10153#endif
10154 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10155
10156 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10157 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10158 IEM_MC_REF_EFLAGS(pEFlags);
10159#ifdef RT_ARCH_X86
10160 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10162 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10163 else
10164 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10165#else
10166 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10169 else
10170 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10171#endif
10172
10173 IEM_MC_ADVANCE_RIP_AND_FINISH();
10174 IEM_MC_END();
10175 break;
10176
10177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10178 }
10179 }
10180 else
10181 {
10182 switch (pVCpu->iem.s.enmEffOpSize)
10183 {
10184 case IEMMODE_16BIT:
10185 IEM_MC_BEGIN(4, 3);
10186 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10187 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10188 IEM_MC_ARG(uint16_t, u16Src, 2);
10189 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10191 IEM_MC_LOCAL(uint16_t, u16Ax);
10192
10193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10194 IEMOP_HLP_DONE_DECODING();
10195 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10196 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10197 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10198 IEM_MC_FETCH_EFLAGS(EFlags);
10199 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10200 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10202 else
10203 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10204
10205 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10206 IEM_MC_COMMIT_EFLAGS(EFlags);
10207 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10208 IEM_MC_ADVANCE_RIP_AND_FINISH();
10209 IEM_MC_END();
10210 break;
10211
10212 case IEMMODE_32BIT:
10213 IEM_MC_BEGIN(4, 3);
10214 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10215 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10216 IEM_MC_ARG(uint32_t, u32Src, 2);
10217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10219 IEM_MC_LOCAL(uint32_t, u32Eax);
10220
10221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10222 IEMOP_HLP_DONE_DECODING();
10223 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10224 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10225 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10226 IEM_MC_FETCH_EFLAGS(EFlags);
10227 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10228 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10230 else
10231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10232
10233 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10234 IEM_MC_COMMIT_EFLAGS(EFlags);
10235
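/* Only write EAX back when the compare failed; on success the
   accumulator is architecturally untouched.  (The 16-bit case above can
   store unconditionally as no high-dword clearing is involved.) */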
10236 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10237 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10238 IEM_MC_ENDIF();
10239
10240 IEM_MC_ADVANCE_RIP_AND_FINISH();
10241 IEM_MC_END();
10242 break;
10243
10244 case IEMMODE_64BIT:
10245 IEM_MC_BEGIN(4, 3);
10246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10247 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10248#ifdef RT_ARCH_X86
10249 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10250#else
10251 IEM_MC_ARG(uint64_t, u64Src, 2);
10252#endif
10253 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10255 IEM_MC_LOCAL(uint64_t, u64Rax);
10256
10257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10258 IEMOP_HLP_DONE_DECODING();
10259 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10260 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10261 IEM_MC_FETCH_EFLAGS(EFlags);
10262 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10263#ifdef RT_ARCH_X86
10264 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10267 else
10268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10269#else
10270 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10271 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10273 else
10274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10275#endif
10276
10277 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10278 IEM_MC_COMMIT_EFLAGS(EFlags);
10279 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10280 IEM_MC_ADVANCE_RIP_AND_FINISH();
10281 IEM_MC_END();
10282 break;
10283
10284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10285 }
10286 }
10287}
10288
10289
10290FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10291{
10292 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10293 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10294
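/* The far pointer operand is laid out offset first, so the 16-bit
   selector is fetched at GCPtrEff + 2/4/8 according to the effective
   operand size. */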
10295 switch (pVCpu->iem.s.enmEffOpSize)
10296 {
10297 case IEMMODE_16BIT:
10298 IEM_MC_BEGIN(5, 1);
10299 IEM_MC_ARG(uint16_t, uSel, 0);
10300 IEM_MC_ARG(uint16_t, offSeg, 1);
10301 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10302 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10303 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10304 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10307 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10308 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10309 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10310 IEM_MC_END();
10311 return VINF_SUCCESS;
10312
10313 case IEMMODE_32BIT:
10314 IEM_MC_BEGIN(5, 1);
10315 IEM_MC_ARG(uint16_t, uSel, 0);
10316 IEM_MC_ARG(uint32_t, offSeg, 1);
10317 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10318 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10319 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10320 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10323 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10324 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10325 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10326 IEM_MC_END();
10327 return VINF_SUCCESS;
10328
10329 case IEMMODE_64BIT:
10330 IEM_MC_BEGIN(5, 1);
10331 IEM_MC_ARG(uint16_t, uSel, 0);
10332 IEM_MC_ARG(uint64_t, offSeg, 1);
10333 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10334 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10335 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10336 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10339 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10340 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10341 else
10342 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10343 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10344 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10345 IEM_MC_END();
10346 return VINF_SUCCESS;
10347
10348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10349 }
10350}
10351
10352
10353/** Opcode 0x0f 0xb2. */
10354FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10355{
10356 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10357 IEMOP_HLP_MIN_386();
10358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10359 if (IEM_IS_MODRM_REG_MODE(bRm))
10360 return IEMOP_RAISE_INVALID_OPCODE();
10361 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10362}
10363
10364
10365/** Opcode 0x0f 0xb3. */
10366FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10367{
10368 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10369 IEMOP_HLP_MIN_386();
10370 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10371}
10372
10373
10374/** Opcode 0x0f 0xb4. */
10375FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10376{
10377 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10378 IEMOP_HLP_MIN_386();
10379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10380 if (IEM_IS_MODRM_REG_MODE(bRm))
10381 return IEMOP_RAISE_INVALID_OPCODE();
10382 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10383}
10384
10385
10386/** Opcode 0x0f 0xb5. */
10387FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10388{
10389 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10390 IEMOP_HLP_MIN_386();
10391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10392 if (IEM_IS_MODRM_REG_MODE(bRm))
10393 return IEMOP_RAISE_INVALID_OPCODE();
10394 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10395}
10396
10397
10398/** Opcode 0x0f 0xb6. */
10399FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10400{
10401 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10402 IEMOP_HLP_MIN_386();
10403
10404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10405
10406 /*
10407 * If rm is denoting a register, no more instruction bytes.
10408 */
10409 if (IEM_IS_MODRM_REG_MODE(bRm))
10410 {
10411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10412 switch (pVCpu->iem.s.enmEffOpSize)
10413 {
10414 case IEMMODE_16BIT:
10415 IEM_MC_BEGIN(0, 1);
10416 IEM_MC_LOCAL(uint16_t, u16Value);
10417 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10418 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10419 IEM_MC_ADVANCE_RIP_AND_FINISH();
10420 IEM_MC_END();
10421 break;
10422
10423 case IEMMODE_32BIT:
10424 IEM_MC_BEGIN(0, 1);
10425 IEM_MC_LOCAL(uint32_t, u32Value);
10426 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10427 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10428 IEM_MC_ADVANCE_RIP_AND_FINISH();
10429 IEM_MC_END();
10430 break;
10431
10432 case IEMMODE_64BIT:
10433 IEM_MC_BEGIN(0, 1);
10434 IEM_MC_LOCAL(uint64_t, u64Value);
10435 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10436 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10437 IEM_MC_ADVANCE_RIP_AND_FINISH();
10438 IEM_MC_END();
10439 break;
10440
10441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10442 }
10443 }
10444 else
10445 {
10446 /*
10447 * We're loading a register from memory.
10448 */
10449 switch (pVCpu->iem.s.enmEffOpSize)
10450 {
10451 case IEMMODE_16BIT:
10452 IEM_MC_BEGIN(0, 2);
10453 IEM_MC_LOCAL(uint16_t, u16Value);
10454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10457 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10458 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10459 IEM_MC_ADVANCE_RIP_AND_FINISH();
10460 IEM_MC_END();
10461 break;
10462
10463 case IEMMODE_32BIT:
10464 IEM_MC_BEGIN(0, 2);
10465 IEM_MC_LOCAL(uint32_t, u32Value);
10466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10469 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10470 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10471 IEM_MC_ADVANCE_RIP_AND_FINISH();
10472 IEM_MC_END();
10473 break;
10474
10475 case IEMMODE_64BIT:
10476 IEM_MC_BEGIN(0, 2);
10477 IEM_MC_LOCAL(uint64_t, u64Value);
10478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10481 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10482 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10483 IEM_MC_ADVANCE_RIP_AND_FINISH();
10484 IEM_MC_END();
10485 break;
10486
10487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10488 }
10489 }
10490}
10491
10492
10493/** Opcode 0x0f 0xb7. */
10494FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10495{
10496 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10497 IEMOP_HLP_MIN_386();
10498
10499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10500
10501 /** @todo Not entirely sure how the operand size prefix is handled here,
10502 * assuming that it will be ignored. Would be nice to have a few
10503 * tests for this. */
10504 /*
10505 * If rm is denoting a register, no more instruction bytes.
10506 */
10507 if (IEM_IS_MODRM_REG_MODE(bRm))
10508 {
10509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10510 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10511 {
10512 IEM_MC_BEGIN(0, 1);
10513 IEM_MC_LOCAL(uint32_t, u32Value);
10514 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10515 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10516 IEM_MC_ADVANCE_RIP_AND_FINISH();
10517 IEM_MC_END();
10518 }
10519 else
10520 {
10521 IEM_MC_BEGIN(0, 1);
10522 IEM_MC_LOCAL(uint64_t, u64Value);
10523 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10524 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10525 IEM_MC_ADVANCE_RIP_AND_FINISH();
10526 IEM_MC_END();
10527 }
10528 }
10529 else
10530 {
10531 /*
10532 * We're loading a register from memory.
10533 */
10534 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10535 {
10536 IEM_MC_BEGIN(0, 2);
10537 IEM_MC_LOCAL(uint32_t, u32Value);
10538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10541 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10542 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10543 IEM_MC_ADVANCE_RIP_AND_FINISH();
10544 IEM_MC_END();
10545 }
10546 else
10547 {
10548 IEM_MC_BEGIN(0, 2);
10549 IEM_MC_LOCAL(uint64_t, u64Value);
10550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10553 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10554 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10555 IEM_MC_ADVANCE_RIP_AND_FINISH();
10556 IEM_MC_END();
10557 }
10558 }
10559}
10560
10561
10562/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10563FNIEMOP_UD_STUB(iemOp_jmpe);
10564
10565
10566/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10567FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10568{
10569 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10570 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10571 return iemOp_InvalidNeedRM(pVCpu);
10572#ifndef TST_IEM_CHECK_MC
10573# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10574 static const IEMOPBINSIZES s_Native =
10575 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10576# endif
10577 static const IEMOPBINSIZES s_Fallback =
10578 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10579#endif
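/* IEM_SELECT_HOST_OR_FALLBACK picks the assembly implementation when the
   host CPU itself has POPCNT and the portable C fallback otherwise. */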
10580 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10581}
10582
10583
10584/**
10585 * @opcode 0xb9
10586 * @opinvalid intel-modrm
10587 * @optest ->
10588 */
10589FNIEMOP_DEF(iemOp_Grp10)
10590{
10591 /*
10592 * AMD does not decode beyond the 0xb9 whereas intel decodes the modr/m byte
10593 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10594 */
10595 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10596 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10597 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10598}
10599
10600
10601/** Opcode 0x0f 0xba. */
10602FNIEMOP_DEF(iemOp_Grp8)
10603{
10604 IEMOP_HLP_MIN_386();
10605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10606 PCIEMOPBINSIZES pImpl;
10607 switch (IEM_GET_MODRM_REG_8(bRm))
10608 {
10609 case 0: case 1: case 2: case 3:
10610 /* Both AMD and Intel want full modr/m decoding and imm8. */
10611 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10612 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10613 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10614 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10615 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10617 }
10618 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10619
10620 if (IEM_IS_MODRM_REG_MODE(bRm))
10621 {
10622 /* register destination. */
10623 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10625
10626 switch (pVCpu->iem.s.enmEffOpSize)
10627 {
10628 case IEMMODE_16BIT:
10629 IEM_MC_BEGIN(3, 0);
10630 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10631 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10632 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10633
10634 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10635 IEM_MC_REF_EFLAGS(pEFlags);
10636 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10637
10638 IEM_MC_ADVANCE_RIP_AND_FINISH();
10639 IEM_MC_END();
10640 break;
10641
10642 case IEMMODE_32BIT:
10643 IEM_MC_BEGIN(3, 0);
10644 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10645 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10646 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10647
10648 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10649 IEM_MC_REF_EFLAGS(pEFlags);
10650 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10651
10652 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10653 IEM_MC_ADVANCE_RIP_AND_FINISH();
10654 IEM_MC_END();
10655 break;
10656
10657 case IEMMODE_64BIT:
10658 IEM_MC_BEGIN(3, 0);
10659 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10660 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10661 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10662
10663 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10664 IEM_MC_REF_EFLAGS(pEFlags);
10665 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10666
10667 IEM_MC_ADVANCE_RIP_AND_FINISH();
10668 IEM_MC_END();
10669 break;
10670
10671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10672 }
10673 }
10674 else
10675 {
10676 /* memory destination. */
10677
10678 uint32_t fAccess;
10679 if (pImpl->pfnLockedU16)
10680 fAccess = IEM_ACCESS_DATA_RW;
10681 else /* BT */
10682 fAccess = IEM_ACCESS_DATA_R;
10683
10684 /** @todo test negative bit offsets! */
10685 switch (pVCpu->iem.s.enmEffOpSize)
10686 {
10687 case IEMMODE_16BIT:
10688 IEM_MC_BEGIN(3, 1);
10689 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10690 IEM_MC_ARG(uint16_t, u16Src, 1);
10691 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10693
10694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10695 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10696 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10697 if (pImpl->pfnLockedU16)
10698 IEMOP_HLP_DONE_DECODING();
10699 else
10700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10701 IEM_MC_FETCH_EFLAGS(EFlags);
10702 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10703 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10705 else
10706 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10707 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10708
10709 IEM_MC_COMMIT_EFLAGS(EFlags);
10710 IEM_MC_ADVANCE_RIP_AND_FINISH();
10711 IEM_MC_END();
10712 break;
10713
10714 case IEMMODE_32BIT:
10715 IEM_MC_BEGIN(3, 1);
10716 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10717 IEM_MC_ARG(uint32_t, u32Src, 1);
10718 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10720
10721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10722 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10723 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10724 if (pImpl->pfnLockedU16)
10725 IEMOP_HLP_DONE_DECODING();
10726 else
10727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10728 IEM_MC_FETCH_EFLAGS(EFlags);
10729 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10731 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10732 else
10733 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10734 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10735
10736 IEM_MC_COMMIT_EFLAGS(EFlags);
10737 IEM_MC_ADVANCE_RIP_AND_FINISH();
10738 IEM_MC_END();
10739 break;
10740
10741 case IEMMODE_64BIT:
10742 IEM_MC_BEGIN(3, 1);
10743 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10744 IEM_MC_ARG(uint64_t, u64Src, 1);
10745 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10747
10748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10749 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10750 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10751 if (pImpl->pfnLockedU16)
10752 IEMOP_HLP_DONE_DECODING();
10753 else
10754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10755 IEM_MC_FETCH_EFLAGS(EFlags);
10756 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10757 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10758 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10759 else
10760 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10761 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10762
10763 IEM_MC_COMMIT_EFLAGS(EFlags);
10764 IEM_MC_ADVANCE_RIP_AND_FINISH();
10765 IEM_MC_END();
10766 break;
10767
10768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10769 }
10770 }
10771}
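
/*
 * Illustrative sketch, not the emulation path, of the imm8 forms above: the
 * bit offset wraps modulo the operand size (hence the & 0x0f/0x1f/0x3f
 * masking), CF receives the old bit value, and BTS/BTR/BTC then set, clear
 * or toggle it. RefBtc32 is a hypothetical name and assumes <stdint.h>.
 *
 *     static int RefBtc32(uint32_t *pu32Dst, uint8_t u8Bit)
 *     {
 *         uint32_t const fMask  = UINT32_C(1) << (u8Bit & 0x1f); // imm8 wraps mod 32
 *         int      const fCarry = (*pu32Dst & fMask) != 0;       // CF = old bit value
 *         *pu32Dst ^= fMask; // BTC toggles; BTS would OR, BTR would AND with ~fMask
 *         return fCarry;
 *     }
 */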
10772
10773
10774/** Opcode 0x0f 0xbb. */
10775FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10776{
10777 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10778 IEMOP_HLP_MIN_386();
10779 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10780}
10781
10782
10783/**
10784 * Common worker for BSF and BSR instructions.
10785 *
10786 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10787 * the destination register (only when the source is non-zero), so for 32-bit
10788 * operations the high bits of the 64-bit destination must be left alone.
10789 *
10790 * @param pImpl Pointer to the instruction implementation (assembly).
10791 */
10792FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10793{
10794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10795
10796 /*
10797 * If rm is denoting a register, no more instruction bytes.
10798 */
10799 if (IEM_IS_MODRM_REG_MODE(bRm))
10800 {
10801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10802 switch (pVCpu->iem.s.enmEffOpSize)
10803 {
10804 case IEMMODE_16BIT:
10805 IEM_MC_BEGIN(3, 0);
10806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10807 IEM_MC_ARG(uint16_t, u16Src, 1);
10808 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10809
10810 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10811 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10812 IEM_MC_REF_EFLAGS(pEFlags);
10813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10814
10815 IEM_MC_ADVANCE_RIP_AND_FINISH();
10816 IEM_MC_END();
10817 break;
10818
10819 case IEMMODE_32BIT:
10820 IEM_MC_BEGIN(3, 0);
10821 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10822 IEM_MC_ARG(uint32_t, u32Src, 1);
10823 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10824
10825 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10826 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10827 IEM_MC_REF_EFLAGS(pEFlags);
10828 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10829 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10830 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10831 IEM_MC_ENDIF();
10832 IEM_MC_ADVANCE_RIP_AND_FINISH();
10833 IEM_MC_END();
10834 break;
10835
10836 case IEMMODE_64BIT:
10837 IEM_MC_BEGIN(3, 0);
10838 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10839 IEM_MC_ARG(uint64_t, u64Src, 1);
10840 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10841
10842 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10843 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10844 IEM_MC_REF_EFLAGS(pEFlags);
10845 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10846
10847 IEM_MC_ADVANCE_RIP_AND_FINISH();
10848 IEM_MC_END();
10849 break;
10850
10851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10852 }
10853 }
10854 else
10855 {
10856 /*
10857 * We're accessing memory.
10858 */
10859 switch (pVCpu->iem.s.enmEffOpSize)
10860 {
10861 case IEMMODE_16BIT:
10862 IEM_MC_BEGIN(3, 1);
10863 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10864 IEM_MC_ARG(uint16_t, u16Src, 1);
10865 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10867
10868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10870 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10871 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10872 IEM_MC_REF_EFLAGS(pEFlags);
10873 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10874
10875 IEM_MC_ADVANCE_RIP_AND_FINISH();
10876 IEM_MC_END();
10877 break;
10878
10879 case IEMMODE_32BIT:
10880 IEM_MC_BEGIN(3, 1);
10881 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10882 IEM_MC_ARG(uint32_t, u32Src, 1);
10883 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10885
10886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10888 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10889 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10890 IEM_MC_REF_EFLAGS(pEFlags);
10891 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10892
10893 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10894 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10895 IEM_MC_ENDIF();
10896 IEM_MC_ADVANCE_RIP_AND_FINISH();
10897 IEM_MC_END();
10898 break;
10899
10900 case IEMMODE_64BIT:
10901 IEM_MC_BEGIN(3, 1);
10902 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10903 IEM_MC_ARG(uint64_t, u64Src, 1);
10904 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10906
10907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10909 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10910 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10911 IEM_MC_REF_EFLAGS(pEFlags);
10912 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10913
10914 IEM_MC_ADVANCE_RIP_AND_FINISH();
10915 IEM_MC_END();
10916 break;
10917
10918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10919 }
10920 }
10921}
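
/*
 * Illustrative sketch, not the emulation path, of the BSF behavior wrapped
 * above: a zero source sets ZF and leaves the destination untouched,
 * otherwise ZF is cleared and the index of the least significant set bit is
 * stored (BSR scans from the most significant bit instead). RefBsf32 is a
 * hypothetical name and assumes <stdint.h>.
 *
 *     static int RefBsf32(uint32_t *pu32Dst, uint32_t u32Src)
 *     {
 *         if (!u32Src)
 *             return 1;           // ZF=1, destination left unchanged
 *         uint32_t iBit = 0;
 *         while (!(u32Src & 1))
 *         {
 *             u32Src >>= 1;
 *             iBit++;
 *         }
 *         *pu32Dst = iBit;        // index of the least significant set bit
 *         return 0;               // ZF=0
 *     }
 */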
10922
10923
10924/** Opcode 0x0f 0xbc. */
10925FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10926{
10927 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10928 IEMOP_HLP_MIN_386();
10929 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10930 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10931}
10932
10933
10934/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10935FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10936{
10937 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10938 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10939 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10940
10941#ifndef TST_IEM_CHECK_MC
10942 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10943 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10944 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10945 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10946 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10947 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10948 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10949 {
10950 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10951 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10952 };
10953#endif
10954 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10955 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10956 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10957}
10958
10959
10960/** Opcode 0x0f 0xbd. */
10961FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10962{
10963 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10964 IEMOP_HLP_MIN_386();
10965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10966 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
10967}
10968
10969
10970/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
10971FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
10972{
10973 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10974 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
10975 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10976
10977#ifndef TST_IEM_CHECK_MC
10978 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
10979 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
10980 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
10981 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
10982 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
10983 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
10984 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
10985 {
10986 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
10987 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
10988 };
10989#endif
10990 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10991 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10992 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10993}
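
/*
 * Illustrative sketch, not the emulation path: unlike BSF/BSR, TZCNT and
 * LZCNT define the zero-input case, returning the operand width and setting
 * CF, while ZF reflects a zero result. RefTzCnt32 is a hypothetical name and
 * assumes <stdint.h>; LZCNT mirrors it from the top bit down.
 *
 *     static uint32_t RefTzCnt32(uint32_t u32Src)
 *     {
 *         uint32_t cZeros = 0;
 *         while (cZeros < 32 && !(u32Src & 1))
 *         {
 *             u32Src >>= 1;
 *             cZeros++;
 *         }
 *         return cZeros; // 32 for a zero source (CF=1); ZF=1 when the result is zero
 *     }
 */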
10994
10995
10996
10997/** Opcode 0x0f 0xbe. */
10998FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
10999{
11000 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11001 IEMOP_HLP_MIN_386();
11002
11003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11004
11005 /*
11006 * If rm is denoting a register, no more instruction bytes.
11007 */
11008 if (IEM_IS_MODRM_REG_MODE(bRm))
11009 {
11010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11011 switch (pVCpu->iem.s.enmEffOpSize)
11012 {
11013 case IEMMODE_16BIT:
11014 IEM_MC_BEGIN(0, 1);
11015 IEM_MC_LOCAL(uint16_t, u16Value);
11016 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11017 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11018 IEM_MC_ADVANCE_RIP_AND_FINISH();
11019 IEM_MC_END();
11020 break;
11021
11022 case IEMMODE_32BIT:
11023 IEM_MC_BEGIN(0, 1);
11024 IEM_MC_LOCAL(uint32_t, u32Value);
11025 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11026 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11027 IEM_MC_ADVANCE_RIP_AND_FINISH();
11028 IEM_MC_END();
11029 break;
11030
11031 case IEMMODE_64BIT:
11032 IEM_MC_BEGIN(0, 1);
11033 IEM_MC_LOCAL(uint64_t, u64Value);
11034 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11035 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11036 IEM_MC_ADVANCE_RIP_AND_FINISH();
11037 IEM_MC_END();
11038 break;
11039
11040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11041 }
11042 }
11043 else
11044 {
11045 /*
11046 * We're loading a register from memory.
11047 */
11048 switch (pVCpu->iem.s.enmEffOpSize)
11049 {
11050 case IEMMODE_16BIT:
11051 IEM_MC_BEGIN(0, 2);
11052 IEM_MC_LOCAL(uint16_t, u16Value);
11053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11056 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11057 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11058 IEM_MC_ADVANCE_RIP_AND_FINISH();
11059 IEM_MC_END();
11060 break;
11061
11062 case IEMMODE_32BIT:
11063 IEM_MC_BEGIN(0, 2);
11064 IEM_MC_LOCAL(uint32_t, u32Value);
11065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11068 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11069 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11070 IEM_MC_ADVANCE_RIP_AND_FINISH();
11071 IEM_MC_END();
11072 break;
11073
11074 case IEMMODE_64BIT:
11075 IEM_MC_BEGIN(0, 2);
11076 IEM_MC_LOCAL(uint64_t, u64Value);
11077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11080 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11081 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11082 IEM_MC_ADVANCE_RIP_AND_FINISH();
11083 IEM_MC_END();
11084 break;
11085
11086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11087 }
11088 }
11089}
11090
11091
11092/** Opcode 0x0f 0xbf. */
11093FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11094{
11095 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11096 IEMOP_HLP_MIN_386();
11097
11098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11099
11100 /** @todo Not entirely sure how the operand size prefix is handled here,
11101 * assuming that it will be ignored. Would be nice to have a few
11102 * tests for this. */
11103 /*
11104 * If rm is denoting a register, no more instruction bytes.
11105 */
11106 if (IEM_IS_MODRM_REG_MODE(bRm))
11107 {
11108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11109 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11110 {
11111 IEM_MC_BEGIN(0, 1);
11112 IEM_MC_LOCAL(uint32_t, u32Value);
11113 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11114 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11115 IEM_MC_ADVANCE_RIP_AND_FINISH();
11116 IEM_MC_END();
11117 }
11118 else
11119 {
11120 IEM_MC_BEGIN(0, 1);
11121 IEM_MC_LOCAL(uint64_t, u64Value);
11122 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11123 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11124 IEM_MC_ADVANCE_RIP_AND_FINISH();
11125 IEM_MC_END();
11126 }
11127 }
11128 else
11129 {
11130 /*
11131 * We're loading a register from memory.
11132 */
11133 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11134 {
11135 IEM_MC_BEGIN(0, 2);
11136 IEM_MC_LOCAL(uint32_t, u32Value);
11137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11140 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11141 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11142 IEM_MC_ADVANCE_RIP_AND_FINISH();
11143 IEM_MC_END();
11144 }
11145 else
11146 {
11147 IEM_MC_BEGIN(0, 2);
11148 IEM_MC_LOCAL(uint64_t, u64Value);
11149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11152 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11153 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11154 IEM_MC_ADVANCE_RIP_AND_FINISH();
11155 IEM_MC_END();
11156 }
11157 }
11158}
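
/*
 * Illustrative sketch, not the emulation path: the sign extension done by
 * the two movsx forms above is a widening cast through the signed type,
 * e.g. for the Gv,Ew form in 64-bit mode. RefMovsxU16ToU64 is a
 * hypothetical name and assumes <stdint.h>.
 *
 *     static uint64_t RefMovsxU16ToU64(uint16_t u16Src)
 *     {
 *         return (uint64_t)(int64_t)(int16_t)u16Src; // bit 15 fills bits 16 thru 63
 *     }
 */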
11159
11160
11161/** Opcode 0x0f 0xc0. */
11162FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11163{
11164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11165 IEMOP_HLP_MIN_486();
11166 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11167
11168 /*
11169 * If rm is denoting a register, no more instruction bytes.
11170 */
11171 if (IEM_IS_MODRM_REG_MODE(bRm))
11172 {
11173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11174
11175 IEM_MC_BEGIN(3, 0);
11176 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11177 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11179
11180 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11181 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11182 IEM_MC_REF_EFLAGS(pEFlags);
11183 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11184
11185 IEM_MC_ADVANCE_RIP_AND_FINISH();
11186 IEM_MC_END();
11187 }
11188 else
11189 {
11190 /*
11191 * We're accessing memory.
11192 */
11193 IEM_MC_BEGIN(3, 3);
11194 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11195 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11196 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11197 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11199
11200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11201 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11202 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11203 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11204 IEM_MC_FETCH_EFLAGS(EFlags);
11205 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11206 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11207 else
11208 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11209
11210 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11211 IEM_MC_COMMIT_EFLAGS(EFlags);
11212 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11213 IEM_MC_ADVANCE_RIP_AND_FINISH();
11214 IEM_MC_END();
11215 }
11216}
11217
11218
11219/** Opcode 0x0f 0xc1. */
11220FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11221{
11222 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11223 IEMOP_HLP_MIN_486();
11224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11225
11226 /*
11227 * If rm is denoting a register, no more instruction bytes.
11228 */
11229 if (IEM_IS_MODRM_REG_MODE(bRm))
11230 {
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11232
11233 switch (pVCpu->iem.s.enmEffOpSize)
11234 {
11235 case IEMMODE_16BIT:
11236 IEM_MC_BEGIN(3, 0);
11237 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11238 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11239 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11240
11241 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11242 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11243 IEM_MC_REF_EFLAGS(pEFlags);
11244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11245
11246 IEM_MC_ADVANCE_RIP_AND_FINISH();
11247 IEM_MC_END();
11248 break;
11249
11250 case IEMMODE_32BIT:
11251 IEM_MC_BEGIN(3, 0);
11252 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11253 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11255
11256 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11257 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11258 IEM_MC_REF_EFLAGS(pEFlags);
11259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11260
11261 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11262 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11263 IEM_MC_ADVANCE_RIP_AND_FINISH();
11264 IEM_MC_END();
11265 break;
11266
11267 case IEMMODE_64BIT:
11268 IEM_MC_BEGIN(3, 0);
11269 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11270 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11272
11273 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11274 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11275 IEM_MC_REF_EFLAGS(pEFlags);
11276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11277
11278 IEM_MC_ADVANCE_RIP_AND_FINISH();
11279 IEM_MC_END();
11280 break;
11281
11282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11283 }
11284 }
11285 else
11286 {
11287 /*
11288 * We're accessing memory.
11289 */
11290 switch (pVCpu->iem.s.enmEffOpSize)
11291 {
11292 case IEMMODE_16BIT:
11293 IEM_MC_BEGIN(3, 3);
11294 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11295 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11296 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11297 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11299
11300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11301 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11302 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11303 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11304 IEM_MC_FETCH_EFLAGS(EFlags);
11305 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11306 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11307 else
11308 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11309
11310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11311 IEM_MC_COMMIT_EFLAGS(EFlags);
11312 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11313 IEM_MC_ADVANCE_RIP_AND_FINISH();
11314 IEM_MC_END();
11315 break;
11316
11317 case IEMMODE_32BIT:
11318 IEM_MC_BEGIN(3, 3);
11319 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11320 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11321 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11322 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11324
11325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11326 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11327 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11328 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11329 IEM_MC_FETCH_EFLAGS(EFlags);
11330 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11332 else
11333 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11334
11335 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11336 IEM_MC_COMMIT_EFLAGS(EFlags);
11337 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11338 IEM_MC_ADVANCE_RIP_AND_FINISH();
11339 IEM_MC_END();
11340 break;
11341
11342 case IEMMODE_64BIT:
11343 IEM_MC_BEGIN(3, 3);
11344 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11345 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11346 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11347 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11349
11350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11351 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11352 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11353 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11354 IEM_MC_FETCH_EFLAGS(EFlags);
11355 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11356 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11357 else
11358 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11359
11360 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11361 IEM_MC_COMMIT_EFLAGS(EFlags);
11362 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11363 IEM_MC_ADVANCE_RIP_AND_FINISH();
11364 IEM_MC_END();
11365 break;
11366
11367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11368 }
11369 }
11370}
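
/*
 * Illustrative sketch, not the emulation path, of the exchange-and-add the
 * xadd workers above perform: the destination receives the sum, the source
 * register receives the old destination value, and EFLAGS are set as for
 * ADD. RefXadd32 is a hypothetical name and assumes <stdint.h>.
 *
 *     static void RefXadd32(uint32_t *pu32Dst, uint32_t *pu32Reg)
 *     {
 *         uint32_t const u32Tmp = *pu32Dst; // save the old destination
 *         *pu32Dst = u32Tmp + *pu32Reg;     // destination = sum (EFLAGS as ADD)
 *         *pu32Reg = u32Tmp;                // source register = old destination
 *     }
 */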
11371
11372
11373/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11374FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11375{
11376 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11377
11378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11379 if (IEM_IS_MODRM_REG_MODE(bRm))
11380 {
11381 /*
11382 * XMM, XMM.
11383 */
11384 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11386 IEM_MC_BEGIN(4, 2);
11387 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11388 IEM_MC_LOCAL(X86XMMREG, Dst);
11389 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11390 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11391 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11392 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11393 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11394 IEM_MC_PREPARE_SSE_USAGE();
11395 IEM_MC_REF_MXCSR(pfMxcsr);
11396 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11397 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11398 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11399 IEM_MC_IF_MXCSR_XCPT_PENDING()
11400 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11401 IEM_MC_ELSE()
11402 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11403 IEM_MC_ENDIF();
11404
11405 IEM_MC_ADVANCE_RIP_AND_FINISH();
11406 IEM_MC_END();
11407 }
11408 else
11409 {
11410 /*
11411 * XMM, [mem128].
11412 */
11413 IEM_MC_BEGIN(4, 3);
11414 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11415 IEM_MC_LOCAL(X86XMMREG, Dst);
11416 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11417 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11418 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11420
11421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11422 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11423 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11425 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11426 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11427
11428 IEM_MC_PREPARE_SSE_USAGE();
11429 IEM_MC_REF_MXCSR(pfMxcsr);
11430 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11431 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11432 IEM_MC_IF_MXCSR_XCPT_PENDING()
11433 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11434 IEM_MC_ELSE()
11435 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11436 IEM_MC_ENDIF();
11437
11438 IEM_MC_ADVANCE_RIP_AND_FINISH();
11439 IEM_MC_END();
11440 }
11441}
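
/*
 * Illustrative sketch, not the emulation path: how the imm8 selects the
 * comparison predicate for the cmpps/cmppd/cmpss/cmpsd family, producing an
 * all-ones or all-zeroes element mask (QNaN signalling subtleties ignored).
 * RefCmpSs is a hypothetical name and assumes <math.h> for isnan.
 *
 *     static uint32_t RefCmpSs(float r32Src1, float r32Src2, uint8_t bImm)
 *     {
 *         int fRes;
 *         switch (bImm & 7)
 *         {
 *             case 0:  fRes = r32Src1 == r32Src2; break;                 // EQ
 *             case 1:  fRes = r32Src1 <  r32Src2; break;                 // LT
 *             case 2:  fRes = r32Src1 <= r32Src2; break;                 // LE
 *             case 3:  fRes = isnan(r32Src1) || isnan(r32Src2); break;   // UNORD
 *             case 4:  fRes = !(r32Src1 == r32Src2); break;              // NEQ
 *             case 5:  fRes = !(r32Src1 <  r32Src2); break;              // NLT
 *             case 6:  fRes = !(r32Src1 <= r32Src2); break;              // NLE
 *             default: fRes = !isnan(r32Src1) && !isnan(r32Src2); break; // ORD
 *         }
 *         return fRes ? UINT32_MAX : 0; // all ones when true, all zeroes when false
 *     }
 */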
11442
11443
11444/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11445FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11446{
11447 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11448
11449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11450 if (IEM_IS_MODRM_REG_MODE(bRm))
11451 {
11452 /*
11453 * XMM, XMM.
11454 */
11455 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11457 IEM_MC_BEGIN(4, 2);
11458 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11459 IEM_MC_LOCAL(X86XMMREG, Dst);
11460 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11461 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11462 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11463 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11465 IEM_MC_PREPARE_SSE_USAGE();
11466 IEM_MC_REF_MXCSR(pfMxcsr);
11467 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11468 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11469 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11470 IEM_MC_IF_MXCSR_XCPT_PENDING()
11471 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11472 IEM_MC_ELSE()
11473 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11474 IEM_MC_ENDIF();
11475
11476 IEM_MC_ADVANCE_RIP_AND_FINISH();
11477 IEM_MC_END();
11478 }
11479 else
11480 {
11481 /*
11482 * XMM, [mem128].
11483 */
11484 IEM_MC_BEGIN(4, 3);
11485 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11486 IEM_MC_LOCAL(X86XMMREG, Dst);
11487 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11488 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11489 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11491
11492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11493 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11494 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11496 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11497 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11498
11499 IEM_MC_PREPARE_SSE_USAGE();
11500 IEM_MC_REF_MXCSR(pfMxcsr);
11501 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11502 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11503 IEM_MC_IF_MXCSR_XCPT_PENDING()
11504 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11505 IEM_MC_ELSE()
11506 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11507 IEM_MC_ENDIF();
11508
11509 IEM_MC_ADVANCE_RIP_AND_FINISH();
11510 IEM_MC_END();
11511 }
11512}
11513
11514
11515/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11516FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11517{
11518 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11519
11520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11521 if (IEM_IS_MODRM_REG_MODE(bRm))
11522 {
11523 /*
11524 * XMM32, XMM32.
11525 */
11526 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11528 IEM_MC_BEGIN(4, 2);
11529 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11530 IEM_MC_LOCAL(X86XMMREG, Dst);
11531 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11532 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11533 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11534 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11535 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11536 IEM_MC_PREPARE_SSE_USAGE();
11537 IEM_MC_REF_MXCSR(pfMxcsr);
11538 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11539 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11540 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11541 IEM_MC_IF_MXCSR_XCPT_PENDING()
11542 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11543 IEM_MC_ELSE()
11544 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11545 IEM_MC_ENDIF();
11546
11547 IEM_MC_ADVANCE_RIP_AND_FINISH();
11548 IEM_MC_END();
11549 }
11550 else
11551 {
11552 /*
11553 * XMM32, [mem32].
11554 */
11555 IEM_MC_BEGIN(4, 3);
11556 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11557 IEM_MC_LOCAL(X86XMMREG, Dst);
11558 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11559 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11560 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11562
11563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11564 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11565 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11567 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11568 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11569
11570 IEM_MC_PREPARE_SSE_USAGE();
11571 IEM_MC_REF_MXCSR(pfMxcsr);
11572 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11573 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11574 IEM_MC_IF_MXCSR_XCPT_PENDING()
11575 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11576 IEM_MC_ELSE()
11577 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11578 IEM_MC_ENDIF();
11579
11580 IEM_MC_ADVANCE_RIP_AND_FINISH();
11581 IEM_MC_END();
11582 }
11583}
11584
11585
11586/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11587FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11588{
11589 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11590
11591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11592 if (IEM_IS_MODRM_REG_MODE(bRm))
11593 {
11594 /*
11595 * XMM64, XMM64.
11596 */
11597 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11599 IEM_MC_BEGIN(4, 2);
11600 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11601 IEM_MC_LOCAL(X86XMMREG, Dst);
11602 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11603 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11604 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11605 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11606 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11607 IEM_MC_PREPARE_SSE_USAGE();
11608 IEM_MC_REF_MXCSR(pfMxcsr);
11609 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11610 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11611 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11612 IEM_MC_IF_MXCSR_XCPT_PENDING()
11613 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11614 IEM_MC_ELSE()
11615 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11616 IEM_MC_ENDIF();
11617
11618 IEM_MC_ADVANCE_RIP_AND_FINISH();
11619 IEM_MC_END();
11620 }
11621 else
11622 {
11623 /*
11624 * XMM64, [mem64].
11625 */
11626 IEM_MC_BEGIN(4, 3);
11627 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11628 IEM_MC_LOCAL(X86XMMREG, Dst);
11629 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11630 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11631 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11633
11634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11635 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11636 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11639 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11640
11641 IEM_MC_PREPARE_SSE_USAGE();
11642 IEM_MC_REF_MXCSR(pfMxcsr);
11643 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11644 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11645 IEM_MC_IF_MXCSR_XCPT_PENDING()
11646 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11647 IEM_MC_ELSE()
11648 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11649 IEM_MC_ENDIF();
11650
11651 IEM_MC_ADVANCE_RIP_AND_FINISH();
11652 IEM_MC_END();
11653 }
11654}
11655
11656
11657/** Opcode 0x0f 0xc3. */
11658FNIEMOP_DEF(iemOp_movnti_My_Gy)
11659{
11660 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11661
11662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11663
11664 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11665 if (IEM_IS_MODRM_MEM_MODE(bRm))
11666 {
11667 switch (pVCpu->iem.s.enmEffOpSize)
11668 {
11669 case IEMMODE_32BIT:
11670 IEM_MC_BEGIN(0, 2);
11671 IEM_MC_LOCAL(uint32_t, u32Value);
11672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11673
11674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11676 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11677 return IEMOP_RAISE_INVALID_OPCODE();
11678
11679 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11680 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11681 IEM_MC_ADVANCE_RIP_AND_FINISH();
11682 IEM_MC_END();
11683 break;
11684
11685 case IEMMODE_64BIT:
11686 IEM_MC_BEGIN(0, 2);
11687 IEM_MC_LOCAL(uint64_t, u64Value);
11688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11689
11690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11692 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11693 return IEMOP_RAISE_INVALID_OPCODE();
11694
11695 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11696 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11697 IEM_MC_ADVANCE_RIP_AND_FINISH();
11698 IEM_MC_END();
11699 break;
11700
11701 case IEMMODE_16BIT:
11702 /** @todo check this form. */
11703 return IEMOP_RAISE_INVALID_OPCODE();
11704
11705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11706 }
11707 }
11708 else
11709 return IEMOP_RAISE_INVALID_OPCODE();
11710}
11711
11712
11713/* Opcode 0x66 0x0f 0xc3 - invalid */
11714/* Opcode 0xf3 0x0f 0xc3 - invalid */
11715/* Opcode 0xf2 0x0f 0xc3 - invalid */
11716
11717
11718/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11719FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11720{
11721 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11723 if (IEM_IS_MODRM_REG_MODE(bRm))
11724 {
11725 /*
11726 * Register, register.
11727 */
11728 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11730 IEM_MC_BEGIN(3, 0);
11731 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11732 IEM_MC_ARG(uint16_t, u16Src, 1);
11733 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11734 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11735 IEM_MC_PREPARE_FPU_USAGE();
11736 IEM_MC_FPU_TO_MMX_MODE();
11737 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11738 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11739 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11740 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11741 IEM_MC_ADVANCE_RIP_AND_FINISH();
11742 IEM_MC_END();
11743 }
11744 else
11745 {
11746 /*
11747 * Register, memory.
11748 */
11749 IEM_MC_BEGIN(3, 1);
11750 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11751 IEM_MC_ARG(uint16_t, u16Src, 1);
11752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11753
11754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11755 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11756 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11758 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11759 IEM_MC_PREPARE_FPU_USAGE();
11760 IEM_MC_FPU_TO_MMX_MODE();
11761
11762 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11763 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11764 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11765 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11766 IEM_MC_ADVANCE_RIP_AND_FINISH();
11767 IEM_MC_END();
11768 }
11769}
11770
11771
11772/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11773FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11774{
11775 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11777 if (IEM_IS_MODRM_REG_MODE(bRm))
11778 {
11779 /*
11780 * Register, register.
11781 */
11782 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11784 IEM_MC_BEGIN(3, 0);
11785 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11786 IEM_MC_ARG(uint16_t, u16Src, 1);
11787 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11788 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11789 IEM_MC_PREPARE_SSE_USAGE();
11790 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11791 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11792 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11793 IEM_MC_ADVANCE_RIP_AND_FINISH();
11794 IEM_MC_END();
11795 }
11796 else
11797 {
11798 /*
11799 * Register, memory.
11800 */
11801 IEM_MC_BEGIN(3, 2);
11802 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11803 IEM_MC_ARG(uint16_t, u16Src, 1);
11804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11805
11806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11807 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11808 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11811 IEM_MC_PREPARE_SSE_USAGE();
11812
11813 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11814 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11815 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11816 IEM_MC_ADVANCE_RIP_AND_FINISH();
11817 IEM_MC_END();
11818 }
11819}
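
/*
 * Illustrative sketch, not the emulation path: pinsrw replaces a single
 * 16-bit lane selected by the imm8; the 64-bit MMX form masks the index
 * with 3, the XMM form with 7. RefPinsrwU128 is a hypothetical name and
 * assumes <stdint.h>.
 *
 *     static void RefPinsrwU128(uint16_t au16Dst[8], uint16_t u16Src, uint8_t bImm)
 *     {
 *         au16Dst[bImm & 7] = u16Src; // only the selected word lane changes
 *     }
 */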
11820
11821
11822/* Opcode 0xf3 0x0f 0xc4 - invalid */
11823/* Opcode 0xf2 0x0f 0xc4 - invalid */
11824
11825
11826/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11827FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11828{
11829 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);*/ /** @todo */
11830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11831 if (IEM_IS_MODRM_REG_MODE(bRm))
11832 {
11833 /*
11834 * Greg32, MMX, imm8.
11835 */
11836 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11838 IEM_MC_BEGIN(3, 1);
11839 IEM_MC_LOCAL(uint16_t, u16Dst);
11840 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11841 IEM_MC_ARG(uint64_t, u64Src, 1);
11842 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11843 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11844 IEM_MC_PREPARE_FPU_USAGE();
11845 IEM_MC_FPU_TO_MMX_MODE();
11846 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
11847 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
11848 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11849 IEM_MC_ADVANCE_RIP_AND_FINISH();
11850 IEM_MC_END();
11851 }
11852 /* No memory operand. */
11853 else
11854 return IEMOP_RAISE_INVALID_OPCODE();
11855}
11856
11857
11858/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11859FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11860{
11861 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11863 if (IEM_IS_MODRM_REG_MODE(bRm))
11864 {
11865 /*
11866 * Greg32, XMM, imm8.
11867 */
11868 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11870 IEM_MC_BEGIN(3, 1);
11871 IEM_MC_LOCAL(uint16_t, u16Dst);
11872 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11873 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11874 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11876 IEM_MC_PREPARE_SSE_USAGE();
11877 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11878 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
11879 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11880 IEM_MC_ADVANCE_RIP_AND_FINISH();
11881 IEM_MC_END();
11882 }
11883 /* No memory operand. */
11884 else
11885 return IEMOP_RAISE_INVALID_OPCODE();
11886}
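
/*
 * Illustrative sketch, not the emulation path: pextrw is the inverse lane
 * pick, zero-extending the selected word into the 32-bit destination (which
 * is why the stores above use IEM_MC_STORE_GREG_U32). RefPextrwU128 is a
 * hypothetical name and assumes <stdint.h>.
 *
 *     static uint32_t RefPextrwU128(uint16_t const au16Src[8], uint8_t bImm)
 *     {
 *         return au16Src[bImm & 7]; // zero extended word lane
 *     }
 */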
11887
11888
11889/* Opcode 0xf3 0x0f 0xc5 - invalid */
11890/* Opcode 0xf2 0x0f 0xc5 - invalid */
11891
11892
11893/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11894FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11895{
11896 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11898 if (IEM_IS_MODRM_REG_MODE(bRm))
11899 {
11900 /*
11901 * XMM, XMM, imm8.
11902 */
11903 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11905 IEM_MC_BEGIN(3, 0);
11906 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11907 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11908 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11910 IEM_MC_PREPARE_SSE_USAGE();
11911 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11912 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11913 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11914 IEM_MC_ADVANCE_RIP_AND_FINISH();
11915 IEM_MC_END();
11916 }
11917 else
11918 {
11919 /*
11920 * XMM, [mem128], imm8.
11921 */
11922 IEM_MC_BEGIN(3, 2);
11923 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11924 IEM_MC_LOCAL(RTUINT128U, uSrc);
11925 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11927
11928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11929 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11930 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11932 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11933 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11934
11935 IEM_MC_PREPARE_SSE_USAGE();
11936 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11937 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11938
11939 IEM_MC_ADVANCE_RIP_AND_FINISH();
11940 IEM_MC_END();
11941 }
11942}
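
/*
 * Illustrative sketch, not the emulation path, of the shufps selection: the
 * two low result dwords are picked from the destination and the two high
 * ones from the source, each by a 2-bit field of the imm8. RefShufPs is a
 * hypothetical name.
 *
 *     static void RefShufPs(float aDst[4], float const aSrc[4], uint8_t bImm)
 *     {
 *         float const aOld[4] = { aDst[0], aDst[1], aDst[2], aDst[3] };
 *         aDst[0] = aOld[ bImm       & 3];
 *         aDst[1] = aOld[(bImm >> 2) & 3];
 *         aDst[2] = aSrc[(bImm >> 4) & 3];
 *         aDst[3] = aSrc[(bImm >> 6) & 3];
 *     }
 */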
11943
11944
11945/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11946FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11947{
11948 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11950 if (IEM_IS_MODRM_REG_MODE(bRm))
11951 {
11952 /*
11953 * XMM, XMM, imm8.
11954 */
11955 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11957 IEM_MC_BEGIN(3, 0);
11958 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11959 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11960 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11961 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11962 IEM_MC_PREPARE_SSE_USAGE();
11963 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11964 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11965 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
11966 IEM_MC_ADVANCE_RIP_AND_FINISH();
11967 IEM_MC_END();
11968 }
11969 else
11970 {
11971 /*
11972 * XMM, [mem128], imm8.
11973 */
11974 IEM_MC_BEGIN(3, 2);
11975 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11976 IEM_MC_LOCAL(RTUINT128U, uSrc);
11977 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11979
11980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11981 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11982 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11985 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11986
11987 IEM_MC_PREPARE_SSE_USAGE();
11988 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11989 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
11990
11991 IEM_MC_ADVANCE_RIP_AND_FINISH();
11992 IEM_MC_END();
11993 }
11994}
11995
11996
11997/* Opcode 0xf3 0x0f 0xc6 - invalid */
11998/* Opcode 0xf2 0x0f 0xc6 - invalid */
11999
12000
12001/** Opcode 0x0f 0xc7 !11/1. */
12002FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12003{
12004 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12005
12006 IEM_MC_BEGIN(4, 3);
12007 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12008 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12009 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12010 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12011 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12012 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12014
12015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12016 IEMOP_HLP_DONE_DECODING();
12017 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12018
12019 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12020 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12021 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12022
12023 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12024 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12025 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12026
12027 IEM_MC_FETCH_EFLAGS(EFlags);
12028 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12029 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12030 else
12031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12032
12033 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12034 IEM_MC_COMMIT_EFLAGS(EFlags);
12035 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12036 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12037 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12038 IEM_MC_ENDIF();
12039 IEM_MC_ADVANCE_RIP_AND_FINISH();
12040
12041 IEM_MC_END();
12042}
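
/*
 * Illustrative sketch, not the emulation path, of the compare-and-exchange
 * done above: EDX:EAX is compared with the memory operand; on a match ZF is
 * set and ECX:EBX is stored, otherwise ZF is cleared and the old memory
 * value is loaded into EDX:EAX. RefCmpXchg8b is a hypothetical name,
 * assumes <stdint.h> and ignores the atomicity of the locked variant.
 *
 *     static int RefCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
 *     {
 *         if (*pu64Mem == *pu64EaxEdx)
 *         {
 *             *pu64Mem = u64EbxEcx; // ZF=1: store ECX:EBX
 *             return 1;
 *         }
 *         *pu64EaxEdx = *pu64Mem;   // ZF=0: load the old value into EDX:EAX
 *         return 0;
 *     }
 */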
12043
12044
12045/** Opcode REX.W 0x0f 0xc7 !11/1. */
12046FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12047{
12048 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12049 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12050 {
12051#if 0
12052 RT_NOREF(bRm);
12053 IEMOP_BITCH_ABOUT_STUB();
12054 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
12055#else
12056 IEM_MC_BEGIN(4, 3);
12057 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12058 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12059 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12060 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12061 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12062 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12064
12065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12066 IEMOP_HLP_DONE_DECODING();
12067 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12068 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12069
12070 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12071 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12072 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12073
12074 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12075 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12076 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12077
12078 IEM_MC_FETCH_EFLAGS(EFlags);
12079# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
12080# if defined(RT_ARCH_AMD64)
12081 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12082# endif
12083 {
12084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12085 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12086 else
12087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12088 }
12089# if defined(RT_ARCH_AMD64)
12090 else
12091# endif
12092# endif
12093# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
12094 {
12095 /* Note! The fallback for 32-bit systems and systems without CX16 does multiple
12096 accesses that are not at all atomic, which works fine in a uni-CPU guest
12097 configuration (ignoring DMA). If guest SMP is active, we have no choice
12098 but to use a rendezvous callback here. Sigh. */
12099 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12101 else
12102 {
12103 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12104 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12105 }
12106 }
12107# endif
12108
12109 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12110 IEM_MC_COMMIT_EFLAGS(EFlags);
12111 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12112 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12113 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12114 IEM_MC_ENDIF();
12115 IEM_MC_ADVANCE_RIP_AND_FINISH();
12116
12117 IEM_MC_END();
12118#endif
12119 }
12120 Log(("cmpxchg16b -> #UD\n"));
12121 return IEMOP_RAISE_INVALID_OPCODE();
12122}
12123
12124FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12125{
12126 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12127 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12128 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12129}
12130
12131
12132/** Opcode 0x0f 0xc7 11/6. */
12133FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12134{
12135 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12136 return IEMOP_RAISE_INVALID_OPCODE();
12137
12138 if (IEM_IS_MODRM_REG_MODE(bRm))
12139 {
12140 /* register destination. */
12141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12142 switch (pVCpu->iem.s.enmEffOpSize)
12143 {
12144 case IEMMODE_16BIT:
12145 IEM_MC_BEGIN(2, 0);
12146 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12147 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12148
12149 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12150 IEM_MC_REF_EFLAGS(pEFlags);
12151 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12152 pu16Dst, pEFlags);
12153
12154 IEM_MC_ADVANCE_RIP_AND_FINISH();
12155 IEM_MC_END();
12156 break;
12157
12158 case IEMMODE_32BIT:
12159 IEM_MC_BEGIN(2, 0);
12160 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12161 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12162
12163 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12164 IEM_MC_REF_EFLAGS(pEFlags);
12165 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12166 pu32Dst, pEFlags);
12167
12168 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12169 IEM_MC_ADVANCE_RIP_AND_FINISH();
12170 IEM_MC_END();
12171 break;
12172
12173 case IEMMODE_64BIT:
12174 IEM_MC_BEGIN(2, 0);
12175 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12176 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12177
12178 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12179 IEM_MC_REF_EFLAGS(pEFlags);
12180 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12181 pu64Dst, pEFlags);
12182
12183 IEM_MC_ADVANCE_RIP_AND_FINISH();
12184 IEM_MC_END();
12185 break;
12186
12187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12188 }
12189 }
12190 /* Register only. */
12191 else
12192 return IEMOP_RAISE_INVALID_OPCODE();
12193}
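
/*
 * Illustrative sketch, not the emulation path, of the rdrand contract
 * implemented above (rdseed below behaves the same way): CF=1 signals valid
 * random data, CF=0 tells the caller to retry, and the other arithmetic
 * flags are cleared. RefRdRand16 and HypotheticalHwRng are made-up names;
 * assumes <stdint.h>.
 *
 *     static int RefRdRand16(uint16_t *pu16Dst, uint32_t *pfEFlags)
 *     {
 *         uint16_t  uVal = 0;
 *         int const fOk  = HypotheticalHwRng(&uVal, sizeof(uVal)); // hypothetical entropy source
 *         *pu16Dst  = uVal;
 *         *pfEFlags = (*pfEFlags & ~(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF))
 *                   | (fOk ? X86_EFL_CF : 0);
 *         return fOk;
 *     }
 */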
12194
12195/** Opcode 0x0f 0xc7 !11/6. */
12196#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12197FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12198{
12199 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12200 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12201 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12202 IEM_MC_BEGIN(2, 0);
12203 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12204 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12206 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12207 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12208 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12209 IEM_MC_END();
12210 return VINF_SUCCESS;
12211}
12212#else
12213FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12214#endif
12215
12216/** Opcode 0x66 0x0f 0xc7 !11/6. */
12217#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12218FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12219{
12220 IEMOP_MNEMONIC(vmclear, "vmclear");
12221 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12222 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12223 IEM_MC_BEGIN(2, 0);
12224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12225 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12227 IEMOP_HLP_DONE_DECODING();
12228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12229 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12230 IEM_MC_END();
12231 return VINF_SUCCESS;
12232}
12233#else
12234FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12235#endif
12236
12237/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12238#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12239FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12240{
12241 IEMOP_MNEMONIC(vmxon, "vmxon");
12242 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12243 IEM_MC_BEGIN(2, 0);
12244 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12245 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12247 IEMOP_HLP_DONE_DECODING();
12248 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12249 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12250 IEM_MC_END();
12251 return VINF_SUCCESS;
12252}
12253#else
12254FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12255#endif
12256
12257/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12258#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12259FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12260{
12261 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12262 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12263 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12264 IEM_MC_BEGIN(2, 0);
12265 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12266 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12268 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12269 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12270 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12271 IEM_MC_END();
12272 return VINF_SUCCESS;
12273}
12274#else
12275FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12276#endif
12277
12278/** Opcode 0x0f 0xc7 11/7. */
12279FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12280{
12281 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12282 return IEMOP_RAISE_INVALID_OPCODE();
12283
12284 if (IEM_IS_MODRM_REG_MODE(bRm))
12285 {
12286 /* register destination. */
12287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12288 switch (pVCpu->iem.s.enmEffOpSize)
12289 {
12290 case IEMMODE_16BIT:
12291 IEM_MC_BEGIN(2, 0);
12292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12293 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12294
12295 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12296 IEM_MC_REF_EFLAGS(pEFlags);
12297 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12298 pu16Dst, pEFlags);
12299
12300 IEM_MC_ADVANCE_RIP_AND_FINISH();
12301 IEM_MC_END();
12302 break;
12303
12304 case IEMMODE_32BIT:
12305 IEM_MC_BEGIN(2, 0);
12306 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12307 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12308
12309 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12310 IEM_MC_REF_EFLAGS(pEFlags);
12311 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12312 pu32Dst, pEFlags);
12313
12314 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12315 IEM_MC_ADVANCE_RIP_AND_FINISH();
12316 IEM_MC_END();
12317 break;
12318
12319 case IEMMODE_64BIT:
12320 IEM_MC_BEGIN(2, 0);
12321 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12322 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12323
12324 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12325 IEM_MC_REF_EFLAGS(pEFlags);
12326 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12327 pu64Dst, pEFlags);
12328
12329 IEM_MC_ADVANCE_RIP_AND_FINISH();
12330 IEM_MC_END();
12331 break;
12332
12333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12334 }
12335 }
12336 /* Register only. */
12337 else
12338 return IEMOP_RAISE_INVALID_OPCODE();
12339}
12340
12341/**
12342 * Group 9 jump table for register variant.
12343 */
12344IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12345{ /* pfx: none, 066h, 0f3h, 0f2h */
12346 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12347 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12348 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12349 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12350 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12351 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12352 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12353 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12354};
12355AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12356
12357
12358/**
12359 * Group 9 jump table for memory variant.
12360 */
12361IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12362{ /* pfx: none, 066h, 0f3h, 0f2h */
12363 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12364 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12365 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12366 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12367 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12368 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12369 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12370 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12371};
12372AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12373
12374
12375/** Opcode 0x0f 0xc7. */
12376FNIEMOP_DEF(iemOp_Grp9)
12377{
12378 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12379 if (IEM_IS_MODRM_REG_MODE(bRm))
12380 /* register, register */
12381 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12382 + pVCpu->iem.s.idxPrefix], bRm);
12383 /* memory, register */
12384 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12385 + pVCpu->iem.s.idxPrefix], bRm);
12386}
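
/*
 * Dispatch illustration for the two tables above (the idxPrefix encoding is
 * an assumption matching the column comments): the index is reg * 4 plus the
 * mandatory-prefix column, with none=0, 066h=1, 0f3h=2, 0f2h=3.  So decoding
 * 66 0F C7 /6 with a memory operand gives:
 *
 *     idx = 6 * 4 + 1 = 25  ->  g_apfnGroup9MemReg[25] == iemOp_Grp9_vmclear_Mq
 */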
12387
12388
12389/**
12390 * Common 'bswap register' helper.
12391 */
12392FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12393{
12394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12395 switch (pVCpu->iem.s.enmEffOpSize)
12396 {
12397 case IEMMODE_16BIT:
12398 IEM_MC_BEGIN(1, 0);
12399 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12400 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12401 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12402 IEM_MC_ADVANCE_RIP_AND_FINISH();
12403 IEM_MC_END();
12404 break;
12405
12406 case IEMMODE_32BIT:
12407 IEM_MC_BEGIN(1, 0);
12408 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12409 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12410 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12411 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12412 IEM_MC_ADVANCE_RIP_AND_FINISH();
12413 IEM_MC_END();
12414 break;
12415
12416 case IEMMODE_64BIT:
12417 IEM_MC_BEGIN(1, 0);
12418 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12419 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12420 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12421 IEM_MC_ADVANCE_RIP_AND_FINISH();
12422 IEM_MC_END();
12423 break;
12424
12425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12426 }
12427}
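
/*
 * Sketch of the plain C shape of the 32-bit byte-swap worker (illustrative
 * only; the real iemAImpl_bswap_* implementations live elsewhere and may be
 * done in assembly):
 *
 *     static void bswapU32Sketch(uint32_t *puDst)
 *     {
 *         uint32_t const uOld = *puDst;
 *         *puDst = (uOld << 24)
 *                | ((uOld & UINT32_C(0x0000ff00)) << 8)
 *                | ((uOld >> 8) & UINT32_C(0x0000ff00))
 *                | (uOld >> 24);
 *     }
 *
 * The 16-bit case above deliberately operates on a 32-bit register reference
 * without the high-dword clearing, since BSWAP with a 16-bit operand is
 * undefined and the high dword must not be cleared (see the note there).
 */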
12428
12429
12430/** Opcode 0x0f 0xc8. */
12431FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12432{
12433 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12434 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
12435 REX.X prefix, but it appears REX.B is the correct one. For a
12436 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12437 IEMOP_HLP_MIN_486();
12438 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12439}
12440
12441
12442/** Opcode 0x0f 0xc9. */
12443FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12444{
12445 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12446 IEMOP_HLP_MIN_486();
12447 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12448}
12449
12450
12451/** Opcode 0x0f 0xca. */
12452FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12453{
12454 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12455 IEMOP_HLP_MIN_486();
12456 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12457}
12458
12459
12460/** Opcode 0x0f 0xcb. */
12461FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12462{
12463 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12464 IEMOP_HLP_MIN_486();
12465 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12466}
12467
12468
12469/** Opcode 0x0f 0xcc. */
12470FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12471{
12472 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12473 IEMOP_HLP_MIN_486();
12474 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12475}
12476
12477
12478/** Opcode 0x0f 0xcd. */
12479FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12480{
12481 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12482 IEMOP_HLP_MIN_486();
12483 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12484}
12485
12486
12487/** Opcode 0x0f 0xce. */
12488FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12489{
12490 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12491 IEMOP_HLP_MIN_486();
12492 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12493}
12494
12495
12496/** Opcode 0x0f 0xcf. */
12497FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12498{
12499 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12500 IEMOP_HLP_MIN_486();
12501 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12502}
12503
12504
12505/* Opcode 0x0f 0xd0 - invalid */
12506
12507
12508/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12509FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12510{
12511 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12512 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12513}
12514
12515
12516/* Opcode 0xf3 0x0f 0xd0 - invalid */
12517
12518
12519/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12520FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12521{
12522 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12523 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12524}
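
/*
 * Semantics reminder for the two ADDSUB forms above (architectural fact,
 * not code from this file): even lanes are subtracted, odd lanes added.
 * For addsubps:
 *
 *     dst[0] = dst[0] - src[0];   dst[1] = dst[1] + src[1];
 *     dst[2] = dst[2] - src[2];   dst[3] = dst[3] + src[3];
 *
 * addsubpd does the same on the two double-precision lanes.
 */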
12525
12526
12527
12528/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12529FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12530{
12531 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12532 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12533}
12534
12535/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12536FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12537{
12538 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12539 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12540}
12541
12542/* Opcode 0xf3 0x0f 0xd1 - invalid */
12543/* Opcode 0xf2 0x0f 0xd1 - invalid */
12544
12545/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12546FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12547{
12548 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12549 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12550}
12551
12552
12553/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12554FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12555{
12556 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12557 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12558}
12559
12560
12561/* Opcode 0xf3 0x0f 0xd2 - invalid */
12562/* Opcode 0xf2 0x0f 0xd2 - invalid */
12563
12564/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12565FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12566{
12567 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12568 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12569}
12570
12571
12572/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12573FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12574{
12575 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12576 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12577}
12578
12579
12580/* Opcode 0xf3 0x0f 0xd3 - invalid */
12581/* Opcode 0xf2 0x0f 0xd3 - invalid */
12582
12583
12584/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12585FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12586{
12587 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12588 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12589}
12590
12591
12592/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12593FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12594{
12595 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12596 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12597}
12598
12599
12600/* Opcode 0xf3 0x0f 0xd4 - invalid */
12601/* Opcode 0xf2 0x0f 0xd4 - invalid */
12602
12603/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12604FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12605{
12606 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12607 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12608}
12609
12610/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12611FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12612{
12613 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12614 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12615}
12616
12617
12618/* Opcode 0xf3 0x0f 0xd5 - invalid */
12619/* Opcode 0xf2 0x0f 0xd5 - invalid */
12620
12621/* Opcode 0x0f 0xd6 - invalid */
12622
12623/**
12624 * @opcode 0xd6
12625 * @oppfx 0x66
12626 * @opcpuid sse2
12627 * @opgroup og_sse2_pcksclr_datamove
12628 * @opxcpttype none
12629 * @optest op1=-1 op2=2 -> op1=2
12630 * @optest op1=0 op2=-42 -> op1=-42
12631 */
12632FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12633{
12634 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12636 if (IEM_IS_MODRM_REG_MODE(bRm))
12637 {
12638 /*
12639 * Register, register.
12640 */
12641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12642 IEM_MC_BEGIN(0, 2);
12643 IEM_MC_LOCAL(uint64_t, uSrc);
12644
12645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12646 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12647
12648 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12649 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12650
12651 IEM_MC_ADVANCE_RIP_AND_FINISH();
12652 IEM_MC_END();
12653 }
12654 else
12655 {
12656 /*
12657 * Memory, register.
12658 */
12659 IEM_MC_BEGIN(0, 2);
12660 IEM_MC_LOCAL(uint64_t, uSrc);
12661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12662
12663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12665 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12667
12668 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12669 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12670
12671 IEM_MC_ADVANCE_RIP_AND_FINISH();
12672 IEM_MC_END();
12673 }
12674}
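
/*
 * Note on the two paths above: the register form writes the destination XMM
 * register zero-extended (IEM_MC_STORE_XREG_U64_ZX_U128), i.e. roughly
 *
 *     pDst->au64[0] = uSrc;
 *     pDst->au64[1] = 0;
 *
 * while the memory form stores only the low quadword.
 */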
12675
12676
12677/**
12678 * @opcode 0xd6
12679 * @opcodesub 11 mr/reg
12680 * @oppfx f3
12681 * @opcpuid sse2
12682 * @opgroup og_sse2_simdint_datamove
12683 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12684 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12685 */
12686FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12687{
12688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12689 if (IEM_IS_MODRM_REG_MODE(bRm))
12690 {
12691 /*
12692 * Register, register.
12693 */
12694 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12696 IEM_MC_BEGIN(0, 1);
12697 IEM_MC_LOCAL(uint64_t, uSrc);
12698
12699 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12700 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12701 IEM_MC_FPU_TO_MMX_MODE();
12702
12703 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12704 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12705
12706 IEM_MC_ADVANCE_RIP_AND_FINISH();
12707 IEM_MC_END();
12708 }
12709
12710 /**
12711 * @opdone
12712 * @opmnemonic udf30fd6mem
12713 * @opcode 0xd6
12714 * @opcodesub !11 mr/reg
12715 * @oppfx f3
12716 * @opunused intel-modrm
12717 * @opcpuid sse
12718 * @optest ->
12719 */
12720 else
12721 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12722}
12723
12724
12725/**
12726 * @opcode 0xd6
12727 * @opcodesub 11 mr/reg
12728 * @oppfx f2
12729 * @opcpuid sse2
12730 * @opgroup og_sse2_simdint_datamove
12731 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12732 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12733 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12734 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12735 * @optest op1=-42 op2=0xfedcba9876543210
12736 * -> op1=0xfedcba9876543210 ftw=0xff
12737 */
12738FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12739{
12740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12741 if (IEM_IS_MODRM_REG_MODE(bRm))
12742 {
12743 /*
12744 * Register, register.
12745 */
12746 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12748 IEM_MC_BEGIN(0, 1);
12749 IEM_MC_LOCAL(uint64_t, uSrc);
12750
12751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12752 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12753 IEM_MC_FPU_TO_MMX_MODE();
12754
12755 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12756 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12757
12758 IEM_MC_ADVANCE_RIP_AND_FINISH();
12759 IEM_MC_END();
12760 }
12761
12762 /**
12763 * @opdone
12764 * @opmnemonic udf20fd6mem
12765 * @opcode 0xd6
12766 * @opcodesub !11 mr/reg
12767 * @oppfx f2
12768 * @opunused intel-modrm
12769 * @opcpuid sse
12770 * @optest ->
12771 */
12772 else
12773 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12774}
12775
12776
12777/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12778FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12779{
12780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12781 /* Docs say register only. */
12782 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12783 {
12784 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12785 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12787 IEM_MC_BEGIN(2, 0);
12788 IEM_MC_ARG(uint64_t *, puDst, 0);
12789 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12790 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12791 IEM_MC_PREPARE_FPU_USAGE();
12792 IEM_MC_FPU_TO_MMX_MODE();
12793
12794 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12795 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12796 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12797
12798 IEM_MC_ADVANCE_RIP_AND_FINISH();
12799 IEM_MC_END();
12800 }
12801 else
12802 return IEMOP_RAISE_INVALID_OPCODE();
12803}
12804
12805
12806/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12807FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12808{
12809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12810 /* Docs say register only. */
12811 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12812 {
12813 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12814 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12816 IEM_MC_BEGIN(2, 0);
12817 IEM_MC_ARG(uint64_t *, puDst, 0);
12818 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12819 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12820 IEM_MC_PREPARE_SSE_USAGE();
12821 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12822 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12823 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12824 IEM_MC_ADVANCE_RIP_AND_FINISH();
12825 IEM_MC_END();
12826 }
12827 else
12828 return IEMOP_RAISE_INVALID_OPCODE();
12829}
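
/*
 * Worker semantics sketch for both pmovmskb variants (illustrative; assumes
 * the C fallback shape, the real iemAImpl_pmovmskb_* workers may differ):
 * gather the most significant bit of each source byte into the low bits of
 * the destination and zero everything above them.
 *
 *     static void pmovmskbU128Sketch(uint64_t *puDst, PCRTUINT128U puSrc)
 *     {
 *         uint64_t fMask = 0;
 *         for (unsigned i = 0; i < 16; i++)
 *             fMask |= (uint64_t)(puSrc->au8[i] >> 7) << i;
 *         *puDst = fMask; // bits 16..63 are zero
 *     }
 */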
12830
12831
12832/* Opcode 0xf3 0x0f 0xd7 - invalid */
12833/* Opcode 0xf2 0x0f 0xd7 - invalid */
12834
12835
12836/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12837FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12838{
12839 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12840 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12841}
12842
12843
12844/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12845FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12846{
12847 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12848 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12849}
12850
12851
12852/* Opcode 0xf3 0x0f 0xd8 - invalid */
12853/* Opcode 0xf2 0x0f 0xd8 - invalid */
12854
12855/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12856FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12857{
12858 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12859 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12860}
12861
12862
12863/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12864FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12865{
12866 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12867 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12868}
12869
12870
12871/* Opcode 0xf3 0x0f 0xd9 - invalid */
12872/* Opcode 0xf2 0x0f 0xd9 - invalid */
12873
12874/** Opcode 0x0f 0xda - pminub Pq, Qq */
12875FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12876{
12877 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12878 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12879}
12880
12881
12882/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12883FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12884{
12885 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12886 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12887}
12888
12889/* Opcode 0xf3 0x0f 0xda - invalid */
12890/* Opcode 0xf2 0x0f 0xda - invalid */
12891
12892/** Opcode 0x0f 0xdb - pand Pq, Qq */
12893FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12894{
12895 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12896 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12897}
12898
12899
12900/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12901FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12902{
12903 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12904 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12905}
12906
12907
12908/* Opcode 0xf3 0x0f 0xdb - invalid */
12909/* Opcode 0xf2 0x0f 0xdb - invalid */
12910
12911/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12912FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12913{
12914 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12915 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12916}
12917
12918
12919/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12920FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12921{
12922 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12923 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12924}
12925
12926
12927/* Opcode 0xf3 0x0f 0xdc - invalid */
12928/* Opcode 0xf2 0x0f 0xdc - invalid */
12929
12930/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12931FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12932{
12933 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12934 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12935}
12936
12937
12938/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12939FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12940{
12941 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12942 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12943}
12944
12945
12946/* Opcode 0xf3 0x0f 0xdd - invalid */
12947/* Opcode 0xf2 0x0f 0xdd - invalid */
12948
12949/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12950FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12951{
12952 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12953 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12954}
12955
12956
12957/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12958FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12959{
12960 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12961 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12962}
12963
12964/* Opcode 0xf3 0x0f 0xde - invalid */
12965/* Opcode 0xf2 0x0f 0xde - invalid */
12966
12967
12968/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12969FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12970{
12971 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12972 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12973}
12974
12975
12976/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12977FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12978{
12979 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12980 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12981}
12982
12983
12984/* Opcode 0xf3 0x0f 0xdf - invalid */
12985/* Opcode 0xf2 0x0f 0xdf - invalid */
12986
12987/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12988FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12989{
12990 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12991 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12992}
12993
12994
12995/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12996FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12997{
12998 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12999 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13000}
13001
13002
13003/* Opcode 0xf3 0x0f 0xe0 - invalid */
13004/* Opcode 0xf2 0x0f 0xe0 - invalid */
13005
13006/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13007FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13008{
13009 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13010 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13011}
13012
13013
13014/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13015FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13016{
13017 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13018 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13019}
13020
13021
13022/* Opcode 0xf3 0x0f 0xe1 - invalid */
13023/* Opcode 0xf2 0x0f 0xe1 - invalid */
13024
13025/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13026FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13027{
13028 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13029 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13030}
13031
13032
13033/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13034FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13035{
13036 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13037 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13038}
13039
13040
13041/* Opcode 0xf3 0x0f 0xe2 - invalid */
13042/* Opcode 0xf2 0x0f 0xe2 - invalid */
13043
13044/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13045FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13046{
13047 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13048 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13049}
13050
13051
13052/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13053FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13054{
13055 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13056 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13057}
13058
13059
13060/* Opcode 0xf3 0x0f 0xe3 - invalid */
13061/* Opcode 0xf2 0x0f 0xe3 - invalid */
13062
13063/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13064FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13065{
13066 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13067 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13068}
13069
13070
13071/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13072FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13073{
13074 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13075 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13076}
13077
13078
13079/* Opcode 0xf3 0x0f 0xe4 - invalid */
13080/* Opcode 0xf2 0x0f 0xe4 - invalid */
13081
13082/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13083FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13084{
13085 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13086 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13087}
13088
13089
13090/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13091FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13092{
13093 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13094 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13095}
13096
13097
13098/* Opcode 0xf3 0x0f 0xe5 - invalid */
13099/* Opcode 0xf2 0x0f 0xe5 - invalid */
13100/* Opcode 0x0f 0xe6 - invalid */
13101
13102
13103/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13104FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13105{
13106 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13107 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13108}
13109
13110
13111/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13112FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13113{
13114 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13115 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13116}
13117
13118
13119/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13120FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13121{
13122 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13123 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13124}
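
/*
 * Reminder on the three conversions above: CVTTPD2DQ (66) truncates toward
 * zero, CVTPD2DQ (F2) rounds according to MXCSR.RC, and CVTDQ2PD (F3) is
 * always exact since every int32 is representable as a double.  The
 * packed-double-to-dword forms write two dwords and zero the upper half of
 * the destination register.
 */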
13125
13126
13127/**
13128 * @opcode 0xe7
13129 * @opcodesub !11 mr/reg
13130 * @oppfx none
13131 * @opcpuid sse
13132 * @opgroup og_sse1_cachect
13133 * @opxcpttype none
13134 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13135 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13136 */
13137FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13138{
13139 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13141 if (IEM_IS_MODRM_MEM_MODE(bRm))
13142 {
13143 /* Register, memory. */
13144 IEM_MC_BEGIN(0, 2);
13145 IEM_MC_LOCAL(uint64_t, uSrc);
13146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13147
13148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13150 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13151 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13152 IEM_MC_FPU_TO_MMX_MODE();
13153
13154 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13155 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13156
13157 IEM_MC_ADVANCE_RIP_AND_FINISH();
13158 IEM_MC_END();
13159 }
13160 /**
13161 * @opdone
13162 * @opmnemonic ud0fe7reg
13163 * @opcode 0xe7
13164 * @opcodesub 11 mr/reg
13165 * @oppfx none
13166 * @opunused immediate
13167 * @opcpuid sse
13168 * @optest ->
13169 */
13170 else
13171 return IEMOP_RAISE_INVALID_OPCODE();
13172}
13173
13174/**
13175 * @opcode 0xe7
13176 * @opcodesub !11 mr/reg
13177 * @oppfx 0x66
13178 * @opcpuid sse2
13179 * @opgroup og_sse2_cachect
13180 * @opxcpttype 1
13181 * @optest op1=-1 op2=2 -> op1=2
13182 * @optest op1=0 op2=-42 -> op1=-42
13183 */
13184FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13185{
13186 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13188 if (IEM_IS_MODRM_MEM_MODE(bRm))
13189 {
13190 /* Register, memory. */
13191 IEM_MC_BEGIN(0, 2);
13192 IEM_MC_LOCAL(RTUINT128U, uSrc);
13193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13194
13195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13197 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13198 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13199
13200 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13201 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13202
13203 IEM_MC_ADVANCE_RIP_AND_FINISH();
13204 IEM_MC_END();
13205 }
13206
13207 /**
13208 * @opdone
13209 * @opmnemonic ud660fe7reg
13210 * @opcode 0xe7
13211 * @opcodesub 11 mr/reg
13212 * @oppfx 0x66
13213 * @opunused immediate
13214 * @opcpuid sse
13215 * @optest ->
13216 */
13217 else
13218 return IEMOP_RAISE_INVALID_OPCODE();
13219}
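
/*
 * Note: the 16-byte store above uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a
 * misaligned effective address raises \#GP(0) as on real hardware; the
 * non-temporal cache hint itself is not modelled here.
 */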
13220
13221/* Opcode 0xf3 0x0f 0xe7 - invalid */
13222/* Opcode 0xf2 0x0f 0xe7 - invalid */
13223
13224
13225/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13226FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13227{
13228 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13229 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13230}
13231
13232
13233/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13234FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13235{
13236 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13237 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13238}
13239
13240
13241/* Opcode 0xf3 0x0f 0xe8 - invalid */
13242/* Opcode 0xf2 0x0f 0xe8 - invalid */
13243
13244/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13245FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13246{
13247 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13248 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13249}
13250
13251
13252/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13253FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13254{
13255 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13256 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13257}
13258
13259
13260/* Opcode 0xf3 0x0f 0xe9 - invalid */
13261/* Opcode 0xf2 0x0f 0xe9 - invalid */
13262
13263
13264/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13265FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13266{
13267 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13268 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13269}
13270
13271
13272/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13273FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13274{
13275 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13276 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13277}
13278
13279
13280/* Opcode 0xf3 0x0f 0xea - invalid */
13281/* Opcode 0xf2 0x0f 0xea - invalid */
13282
13283
13284/** Opcode 0x0f 0xeb - por Pq, Qq */
13285FNIEMOP_DEF(iemOp_por_Pq_Qq)
13286{
13287 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13288 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13289}
13290
13291
13292/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13293FNIEMOP_DEF(iemOp_por_Vx_Wx)
13294{
13295 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13296 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13297}
13298
13299
13300/* Opcode 0xf3 0x0f 0xeb - invalid */
13301/* Opcode 0xf2 0x0f 0xeb - invalid */
13302
13303/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13304FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13305{
13306 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13307 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13308}
13309
13310
13311/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13312FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13313{
13314 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13315 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13316}
13317
13318
13319/* Opcode 0xf3 0x0f 0xec - invalid */
13320/* Opcode 0xf2 0x0f 0xec - invalid */
13321
13322/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13323FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13324{
13325 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13326 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13327}
13328
13329
13330/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13331FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13332{
13333 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13334 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13335}
13336
13337
13338/* Opcode 0xf3 0x0f 0xed - invalid */
13339/* Opcode 0xf2 0x0f 0xed - invalid */
13340
13341
13342/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13343FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13344{
13345 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13346 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13347}
13348
13349
13350/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13351FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13352{
13353 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13354 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13355}
13356
13357
13358/* Opcode 0xf3 0x0f 0xee - invalid */
13359/* Opcode 0xf2 0x0f 0xee - invalid */
13360
13361
13362/** Opcode 0x0f 0xef - pxor Pq, Qq */
13363FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13364{
13365 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13366 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13367}
13368
13369
13370/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13371FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13372{
13373 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13374 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13375}
13376
13377
13378/* Opcode 0xf3 0x0f 0xef - invalid */
13379/* Opcode 0xf2 0x0f 0xef - invalid */
13380
13381/* Opcode 0x0f 0xf0 - invalid */
13382/* Opcode 0x66 0x0f 0xf0 - invalid */
13383
13384
13385/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13386FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13387{
13388 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13390 if (IEM_IS_MODRM_REG_MODE(bRm))
13391 {
13392 /*
13393 * Register, register - (not implemented, assuming it raises \#UD).
13394 */
13395 return IEMOP_RAISE_INVALID_OPCODE();
13396 }
13397 else
13398 {
13399 /*
13400 * Register, memory.
13401 */
13402 IEM_MC_BEGIN(0, 2);
13403 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13405
13406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13408 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13409 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13410 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13411 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13412
13413 IEM_MC_ADVANCE_RIP_AND_FINISH();
13414 IEM_MC_END();
13415 }
13416}
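
/*
 * Note: the fetch above is the unaligned IEM_MC_FETCH_MEM_U128, matching
 * the architectural behaviour that LDDQU, unlike MOVDQA, never faults on
 * misalignment.
 */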
13417
13418
13419/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13420FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13421{
13422 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13423 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13424}
13425
13426
13427/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13428FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13429{
13430 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13431 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13432}
13433
13434
13435/* Opcode 0xf2 0x0f 0xf1 - invalid */
13436
13437/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13438FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13439{
13440 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13441 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13442}
13443
13444
13445/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13446FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13447{
13448 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13449 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13450}
13451
13452
13453/* Opcode 0xf2 0x0f 0xf2 - invalid */
13454
13455/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13456FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13457{
13458 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13459 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13460}
13461
13462
13463/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13464FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13465{
13466 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13467 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13468}
13469
13470/* Opcode 0xf2 0x0f 0xf3 - invalid */
13471
13472/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13473FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13474{
13475 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13476 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13477}
13478
13479
13480/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13481FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13482{
13483 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13484 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13485}
13486
13487
13488/* Opcode 0xf2 0x0f 0xf4 - invalid */
13489
13490/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13491FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13492{
13493 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13494 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13495}
13496
13497
13498/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13499FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13500{
13501 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13502 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13503}
13504
13505/* Opcode 0xf2 0x0f 0xf5 - invalid */
13506
13507/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13508FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13509{
13510 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13511 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13512}
13513
13514
13515/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13516FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13517{
13518 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13519 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13520}
13521
13522
13523/* Opcode 0xf2 0x0f 0xf6 - invalid */
13524
13525/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13526FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13527/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13528FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13529/* Opcode 0xf2 0x0f 0xf7 - invalid */
13530
13531
13532/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13533FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13534{
13535 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13536 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13537}
13538
13539
13540/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13541FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13542{
13543 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13544 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13545}
13546
13547
13548/* Opcode 0xf2 0x0f 0xf8 - invalid */
13549
13550
13551/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13552FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13553{
13554 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13555 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13556}
13557
13558
13559/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13560FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13561{
13562 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13563 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13564}
13565
13566
13567/* Opcode 0xf2 0x0f 0xf9 - invalid */
13568
13569
13570/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13571FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13572{
13573 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13574 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13575}
13576
13577
13578/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13579FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13580{
13581 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13582 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13583}
13584
13585
13586/* Opcode 0xf2 0x0f 0xfa - invalid */
13587
13588
13589/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13590FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13591{
13592 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13593 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13594}
13595
13596
13597/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13598FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13599{
13600 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13601 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13602}
13603
13604
13605/* Opcode 0xf2 0x0f 0xfb - invalid */
13606
13607
13608/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13609FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13610{
13611 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13612 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13613}
13614
13615
13616/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13617FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13618{
13619 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13620 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13621}
13622
13623
13624/* Opcode 0xf2 0x0f 0xfc - invalid */
13625
13626
13627/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13628FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13629{
13630 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13631 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13632}
13633
13634
13635/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13636FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13637{
13638 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13639 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13640}
13641
13642
13643/* Opcode 0xf2 0x0f 0xfd - invalid */
13644
13645
13646/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13647FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13648{
13649 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13650 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13651}
13652
13653
13654/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13655FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13656{
13657 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13658 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13659}
13660
13661
13662/* Opcode 0xf2 0x0f 0xfe - invalid */
13663
13664
13665/** Opcode **** 0x0f 0xff - UD0 */
13666FNIEMOP_DEF(iemOp_ud0)
13667{
13668 IEMOP_MNEMONIC(ud0, "ud0");
13669 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13670 {
13671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13672#ifndef TST_IEM_CHECK_MC
13673 if (IEM_IS_MODRM_MEM_MODE(bRm))
13674 {
13675 RTGCPTR GCPtrEff;
13676 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13677 if (rcStrict != VINF_SUCCESS)
13678 return rcStrict;
13679 }
13680#endif
13681 IEMOP_HLP_DONE_DECODING();
13682 }
13683 return IEMOP_RAISE_INVALID_OPCODE();
13684}
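
/*
 * Dispatch illustration (an assumption about the caller, which lives in the
 * one-byte opcode file): the map below holds four entries per opcode byte,
 * one per mandatory-prefix column, so lookups have the shape
 *
 *     pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix];
 *
 * which is why invalid prefix combinations still need explicit
 * iemOp_Invalid* entries.
 */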
13685
13686
13687
13688/**
13689 * Two byte opcode map, first byte 0x0f.
13690 *
13691 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13692 * check if it needs updating as well when making changes.
13693 */
13694IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13695{
13696 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
13697 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13698 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13699 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13700 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13701 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13702 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13703 /* 0x06 */ IEMOP_X4(iemOp_clts),
13704 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13705 /* 0x08 */ IEMOP_X4(iemOp_invd),
13706 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13707 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13708 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13709 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13710 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13711 /* 0x0e */ IEMOP_X4(iemOp_femms),
13712 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13713
13714 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13715 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13716 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13717 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13718 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13719 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13720 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13721 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13722 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13723 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13724 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13725 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13726 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13727 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13728 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13729 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13730
13731 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13732 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13733 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13734 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13735 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13736 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13737 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13738 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13739 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13740 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13741 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13742 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13743 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13744 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13745 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13746 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13747
13748 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13749 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13750 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13751 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13752 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13753 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13754 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13755 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13756 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13757 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13758 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13759 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
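
/*
 * Reference sketch (not part of the table): the two-byte escape decoder is
 * assumed to index this map as [opcode * 4 + prefix index], the four columns
 * per opcode being the mandatory-prefix variants (none, 0x66, 0xF3, 0xF2),
 * which is what the 256 * 4 = 1024 entry check above asserts.  Roughly:
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 *
 * (See iemOp_2byteEscape in IEMAllInstructionsOneByte.cpp.h for the actual
 * dispatch; idxPrefix is assumed to hold 0..3 in the column order above.)
 */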

/** @} */