VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 97405

Last change on this file since 97405 was 97361, checked in by vboxsync, 2 years ago

VMM/IEM: Removed a lot of now unnecessary return statements, while keeping unnecessary break statements for the look of the thing. Also added missing IEM_NOT_REACHED_DEFAULT_CASE_RET uses to try make sure all cases in the switches will return and we can skip the (typically) unnecessary function return statement. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 481.7 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 97361 2022-11-01 02:02:24Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
38/**
39 * Common worker for MMX instructions on the form:
40 * pxxx mm1, mm2/mem64
41 */
42FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
43{
44 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
45 if (IEM_IS_MODRM_REG_MODE(bRm))
46 {
47 /*
48 * MMX, MMX.
49 */
50 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
51 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
52 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
53 IEM_MC_BEGIN(2, 0);
54 IEM_MC_ARG(uint64_t *, pDst, 0);
55 IEM_MC_ARG(uint64_t const *, pSrc, 1);
56 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
57 IEM_MC_PREPARE_FPU_USAGE();
58 IEM_MC_FPU_TO_MMX_MODE();
59
60 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
61 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
62 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
63 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
64
65 IEM_MC_ADVANCE_RIP_AND_FINISH();
66 IEM_MC_END();
67 }
68 else
69 {
70 /*
71 * MMX, [mem64].
72 */
73 IEM_MC_BEGIN(2, 2);
74 IEM_MC_ARG(uint64_t *, pDst, 0);
75 IEM_MC_LOCAL(uint64_t, uSrc);
76 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
77 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
78
79 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
80 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
81 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
82 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
83
84 IEM_MC_PREPARE_FPU_USAGE();
85 IEM_MC_FPU_TO_MMX_MODE();
86
87 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
88 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
89 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
90
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94}
95
96
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (invoked via IEM_MC_CALL_VOID_AIMPL_2).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* raise any MMX related exceptions before touching state */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* operands only, no FPU state */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);              /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch (and possibly fault) before the FPU state prep below */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
157
158
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Same shape as iemOpCommonMmx_FullFull_To_Full, but the exception check also
 * requires SSE or the AMD MMX extensions (..._CHECK_SSE_OR_MMXEXT variant).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();  /* also checks SSE/MMXEXT availability */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch (and possibly fault) before the FPU state prep below */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
217
218
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands (IEM_MC_CALL_VOID_AIMPL_2).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();  /* also checks SSE/MMXEXT availability */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* operands only, no FPU state */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);              /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch (and possibly fault) before the FPU state prep below */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
280
281
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * @param   pfnU64      The worker function (gets the FPU state too, via
 *                      IEM_MC_CALL_MMX_AIMPL_2).
 * @param   fSupported  Whether the instruction is supported by the current
 *                      guest CPU profile; fed to the ..._XCPT_EX check which
 *                      presumably raises \#UD when false -- see the macro.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported); /* exception check gated on instruction support */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch (and possibly fault) before the FPU state prep below */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
340
341
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* raise any SSE related exceptions before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);   /* worker also gets the SSE/FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch before state prep */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
395
396
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* SSE2 cpuid + related exception checks before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);   /* worker also gets the SSE/FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch before state prep */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
450
451
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands (invoked via IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* SSE2 cpuid + related exception checks before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);  /* operands only, no FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch before state prep */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
508
509
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access (fetched zero-extended to 64 bits below).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* raise any MMX related exceptions before touching state */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc); /* operands only, no FPU state */
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);             /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit read, zero-extended to 64 bits */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
568
569
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* raise any SSE related exceptions before touching state */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);    /* operands only, no FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
628
629
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * Same shape as iemOpCommonSse_LowLow_To_Full except for the SSE2 cpuid check.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* SSE2 cpuid + related exception checks before touching state */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);    /* operands only, no FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
688
689
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* raise any MMX related exceptions before touching state */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc); /* operands only, no FPU state */
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);             /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
750
751
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* raise any SSE related exceptions before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);    /* operands only, no FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
810
811
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * The result is produced into a local IEMSSERESULT and committed afterwards;
 * any pending SIMD FP exception is raised after the store (see the
 * IEM_MC_STORE_SSE_RESULT / IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT
 * sequence below).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* raise any SSE related exceptions before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* both sources const; result goes via SseRes */
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));  /* commit result, ... */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();                 /* ... then raise any pending SIMD FP exception */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch before state prep */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
874
875
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * Scalar single-precision variant: the 2nd operand is a 32-bit float, read
 * either from the low dword of an XMM register or from a 32-bit memory
 * location. Result committed via IEM_MC_STORE_SSE_RESULT before raising any
 * pending SIMD FP exception.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* raise any SSE related exceptions before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); /* low 32-bit float of the source register */
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm)); /* commit result, ... */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();                /* ... then raise any pending SIMD FP exception */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* plain 32-bit read; no 128-bit alignment check here */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
938
939
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * Same shape as iemOpCommonSseFp_FullFull_To_Full except for the SSE2 cpuid
 * check: result produced into a local IEMSSERESULT, committed, and then any
 * pending SIMD FP exception is raised.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* SSE2 cpuid + related exception checks before touching state */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* both sources const; result goes via SseRes */
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));  /* commit result, ... */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();                 /* ... then raise any pending SIMD FP exception */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* decoding is complete only after the addressing bytes are consumed */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch before state prep */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1002
1003
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs      xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * Only the low 64 bits (one double) of the second operand are read; the
 * register form references it via IEM_MC_REF_XREG_R64_CONST and the memory
 * form fetches a single 64-bit value (no 16-byte alignment requirement for
 * the mem64 access).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG(PCRTFLOAT64U,            pSrc2,                      2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(RTFLOAT64U,            r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Src2,       r64Src2,    2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1066
1067
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.  Integer operation: no MXCSR/SIMD FP exception handling,
 * unlike the iemOpCommonSse2Fp_* workers above.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,             puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst,       IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1126
1127
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx      xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks (IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT).
 *
 * Identical in structure to iemOpCommonSse2Fp_FullFull_To_Full except for the
 * SSE3 cpuid/exception check.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG(PCX86XMMREG,             pSrc2,                      2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(X86XMMREG,             uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,   pSrc2,          uSrc2,      2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1190
1191
/** Opcode 0x0f 0x00 /0.
 * SLDT - store local descriptor table register.  Register and memory forms
 * defer to separate C implementations; not valid in real or V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1216
1217
/** Opcode 0x0f 0x00 /1.
 * STR - store task register.  Mirrors the SLDT decoding above; not valid in
 * real or V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1243
1244
/** Opcode 0x0f 0x00 /2.
 * LLDT - load local descriptor table register from a 16-bit selector; the
 * actual loading (and privilege checks) happen in iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1275
1276
/** Opcode 0x0f 0x00 /3.
 * LTR - load task register from a 16-bit selector; validation happens in
 * iemCImpl_ltr.  Memory form raises \#GP(0) for CPL != 0 before the fetch. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1307
1308
1309/** Opcode 0x0f 0x00 /3. */
1310FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1311{
1312 IEMOP_HLP_MIN_286();
1313 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1314
1315 if (IEM_IS_MODRM_REG_MODE(bRm))
1316 {
1317 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1318 IEM_MC_BEGIN(2, 0);
1319 IEM_MC_ARG(uint16_t, u16Sel, 0);
1320 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1321 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1322 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1323 IEM_MC_END();
1324 }
1325 else
1326 {
1327 IEM_MC_BEGIN(2, 1);
1328 IEM_MC_ARG(uint16_t, u16Sel, 0);
1329 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1332 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1333 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1334 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1335 IEM_MC_END();
1336 }
1337 return VINF_SUCCESS;
1338}
1339
1340
/** Opcode 0x0f 0x00 /4.
 * VERR - verify segment for reading; thin wrapper over iemOpCommonGrp6VerX. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false /*fWrite*/);
}
1348
1349
/** Opcode 0x0f 0x00 /5.
 * VERW - verify segment for writing; thin wrapper over iemOpCommonGrp6VerX. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true /*fWrite*/);
}
1357
1358
/**
 * Group 6 jump table, indexed by the ModR/M reg field (0x0f 0x00 /r).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1373
/** Opcode 0x0f 0x00.
 * Dispatches on the ModR/M reg field via g_apfnGroup6. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1380
1381
/** Opcode 0x0f 0x01 /0.
 * SGDT - store the GDTR to memory (memory form only; register encodings of
 * /0 are handled separately as vmcall/vmlaunch/etc. by iemOp_Grp7). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();      /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,        0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc,    1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1398
1399
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1).
 * VMCALL - also used as the generic hypercall instruction by GIM. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1412
1413
/** Opcode 0x0f 0x01 /0 (mod=3, rm=2).
 * VMLAUNCH - only implemented with nested VMX support; otherwise a \#UD stub. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1431
1432
/** Opcode 0x0f 0x01 /0 (mod=3, rm=3).
 * VMRESUME - only implemented with nested VMX support; otherwise a \#UD stub. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1450
1451
/** Opcode 0x0f 0x01 /0 (mod=3, rm=4).
 * VMXOFF - only implemented with nested VMX support; otherwise a \#UD stub. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1469
1470
/** Opcode 0x0f 0x01 /1.
 * SIDT - store the IDTR to memory; decodes exactly like SGDT above. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();      /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,        0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc,    1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1487
1488
/** Opcode 0x0f 0x01 /1 (mod=3, rm=0).
 * MONITOR - the effective segment is passed along for the RAX-based address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1496
1497
/** Opcode 0x0f 0x01 /1 (mod=3, rm=1).
 * MWAIT - fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1505
1506
/** Opcode 0x0f 0x01 /2.
 * LGDT - load the GDTR from memory; the effective operand size is forwarded
 * to the C implementation since it decides the base-address width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();      /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1523
1524
/** Opcode 0x0f 0x01 0xd0.
 * XGETBV - \#UD unless the guest CPU profile reports XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1539
1540
/** Opcode 0x0f 0x01 0xd1.
 * XSETBV - \#UD unless the guest CPU profile reports XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1555
1556
/** Opcode 0x0f 0x01 /3.
 * LIDT - load the IDTR from memory.  Unlike LGDT this computes the forced
 * 64-bit operand size explicitly instead of via IEMOP_HLP_64BIT_OP_SIZE. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1575
1576
/** Opcode 0x0f 0x01 0xd8.
 * VMRUN (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1588
/** Opcode 0x0f 0x01 0xd9.
 * VMMCALL (AMD SVM) - always available as GIM uses it as a hypercall entry. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1601
/** Opcode 0x0f 0x01 0xda.
 * VMLOAD (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1613
1614
/** Opcode 0x0f 0x01 0xdb.
 * VMSAVE (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1626
1627
/** Opcode 0x0f 0x01 0xdc.
 * STGI (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1639
1640
/** Opcode 0x0f 0x01 0xdd.
 * CLGI (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1652
1653
/** Opcode 0x0f 0x01 0xdf.
 * INVLPGA (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1665
1666
/** Opcode 0x0f 0x01 0xde.
 * SKINIT (AMD SVM) - \#UD stub unless built with nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1678
1679
/** Opcode 0x0f 0x01 /4.
 * SMSW - store machine status word (CR0 low bits); register and memory forms
 * defer to separate C implementations. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1702
1703
/** Opcode 0x0f 0x01 /6.
 * LMSW - load machine status word.  Both forms call the same C implementation;
 * the register form passes NIL_RTGCPTR as the (unused) effective address. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        IEM_MC_ARG_CONST(RTGCPTR,   GCPtrEffDst, NIL_RTGCPTR,   1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        IEM_MC_ARG(RTGCPTR,         GCPtrEffDst,                1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1734
1735
/** Opcode 0x0f 0x01 /7.
 * INVLPG - invalidate the TLB entry for the effective address (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1749
1750
/** Opcode 0x0f 0x01 /7 (mod=3, rm=0).
 * SWAPGS - 64-bit mode only (IEMOP_HLP_ONLY_64BIT). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1759
1760
/** Opcode 0x0f 0x01 /7 (mod=3, rm=1).
 * RDTSCP - fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1768
1769
/**
 * Group 7 jump table, memory variant (0x0f 0x01 /r with a memory operand),
 * indexed by the ModR/M reg field.  Register-mode encodings are dispatched
 * by the switch in iemOp_Grp7 instead.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1784
1785
/** Opcode 0x0f 0x01.
 * Group 7 dispatcher: memory forms go through g_apfnGroup7Mem, register
 * forms are decoded here by reg (and for some encodings also rm) bits. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* All eight rm encodings are covered and return, so there is no
               fall-through out of this inner switch. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1855
/** Common worker for opcodes 0x0f 0x02 (LAR) and 0x0f 0x03 (LSL).
 * @a fIsLar selects between the two; the 32-bit and 64-bit operand sizes
 * share the 64-bit register reference / C implementation. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1949
1950
1951
/** Opcode 0x0f 0x02.
 * LAR - load access rights; delegates to the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1958
1959
/** Opcode 0x0f 0x03.
 * LSL - load segment limit; delegates to the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1966
1967
/** Opcode 0x0f 0x05.
 * SYSCALL - fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1975
1976
/** Opcode 0x0f 0x06.
 * CLTS - clear CR0.TS; fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1984
1985
/** Opcode 0x0f 0x07.
 * SYSRET - fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret");  /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1993
1994
/** Opcode 0x0f 0x08.
 * INVD - invalidate caches without writeback; 486+. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
2003
2004
/** Opcode 0x0f 0x09 - WBINVD (privileged write-back + invalidate; deferred to C implementation). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486(); /* Instruction first appeared on the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
2013
2014
/** Opcode 0x0f 0x0b - UD2 (architecturally guaranteed invalid opcode, raises \#UD). */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
2021
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group (GrpP); NOP on Intel. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Only valid when the guest CPU reports long mode or the 3DNow! prefetch feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form is invalid; prefetches only take a memory operand. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* /reg field selects the prefetch variant; undefined encodings alias to /0. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address (for faulting semantics) but do no actual prefetch. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2061
2062
/** Opcode 0x0f 0x0e - FEMMS (AMD 3DNow! fast exit from MMX state). */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Switch the x87/MMX unit back to FPU mode (resets the tag state). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2077
2078
/** Opcode 0x0f 0x0f - 3DNow! escape; real opcode byte is the trailing immediate. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* Raise #UD when the guest CPU profile does not advertise 3DNow!. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Full 128-bit register-to-register copy. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Unaligned fetch (MOVUPS imposes no alignment requirement). */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Full 128-bit register-to-register copy. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Unaligned fetch (MOVUPD imposes no alignment requirement). */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Register form: only the low dword moves; upper destination bits are preserved. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory form: the loaded dword is zero-extended to the full 128-bit register. */
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Register form: only the low qword moves; the high qword of the destination is preserved. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory form: the loaded qword is zero-extended to the full 128-bit register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg is the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Store direction: SSE state is only read. */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg is the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Store direction: SSE state is only read. */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: copy the low dword from reg into rm; upper rm bits are preserved. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM32.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Store direction: SSE state is only read. */

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: copy the low qword from reg into rm; the high qword of rm is preserved. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], XMM64.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Store direction: SSE state is only read. */

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2510
2511
/* Opcode 0x0f 0x12: MOVHLPS (register form) / MOVLPS (memory form) share one encoding. */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVHLPS: high qword of source -> low qword of destination. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        /* NOTE(review): sibling MOVLPx encodings pass Vq_WO here rather than Vq — confirm. */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVLPS: memory qword -> low qword of destination (high qword preserved). */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory qword -> low qword of destination (high qword preserved). */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* The duplication itself is done by the assembly/C worker. */
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (SSE alignment-checked fetch). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2680
2681
2682/**
2683 * @opcode 0x12
2684 * @oppfx 0xf2
2685 * @opcpuid sse3
2686 * @opgroup og_sse3_pcksclr_datamove
2687 * @opxcpttype 5
2688 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2689 * op1=0x22222222111111112222222211111111
2690 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Only the low qword of the source is needed; the worker duplicates it. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Plain (unaligned) qword fetch; MOVDDUP reads only 64 bits from memory. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2738
2739
2740/**
2741 * @opcode 0x13
2742 * @opcodesub !11 mr/reg
2743 * @oppfx none
2744 * @opcpuid sse
2745 * @opgroup og_sse_simdfp_datamove
2746 * @opxcpttype 5
2747 * @optest op1=1 op2=2 -> op1=2
2748 * @optest op1=0 op2=-42 -> op1=-42
2749 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Low qword of the XMM register -> memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
2786
2787
2788/**
2789 * @opcode 0x13
2790 * @opcodesub !11 mr/reg
2791 * @oppfx 0x66
2792 * @opcpuid sse2
2793 * @opgroup og_sse2_pcksclr_datamove
2794 * @opxcpttype 5
2795 * @optest op1=1 op2=2 -> op1=2
2796 * @optest op1=0 op2=-42 -> op1=-42
2797 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Low qword of the XMM register -> memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
2833
2834
2835/**
2836 * @opmnemonic udf30f13
2837 * @opcode 0x13
2838 * @oppfx 0xf3
2839 * @opunused intel-modrm
2840 * @opcpuid sse
2841 * @optest ->
2842 * @opdone
2843 */
2844
2845/**
2846 * @opmnemonic udf20f13
2847 * @opcode 0x13
2848 * @oppfx 0xf2
2849 * @opunused intel-modrm
2850 * @opcpuid sse
2851 * @optest ->
2852 * @opdone
2853 */
2854
2855/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Uses the common SSE low-halves-to-full worker with the unpcklps implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2861
2862
2863/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Uses the common SSE2 low-halves-to-full worker with the unpcklpd implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2869
2870
2871/**
2872 * @opdone
2873 * @opmnemonic udf30f14
2874 * @opcode 0x14
2875 * @oppfx 0xf3
2876 * @opunused intel-modrm
2877 * @opcpuid sse
2878 * @optest ->
2879 * @opdone
2880 */
2881
2882/**
2883 * @opmnemonic udf20f14
2884 * @opcode 0x14
2885 * @oppfx 0xf2
2886 * @opunused intel-modrm
2887 * @opcpuid sse
2888 * @optest ->
2889 * @opdone
2890 */
2891
2892/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Uses the common SSE high-halves-to-full worker with the unpckhps implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2898
2899
2900/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Uses the common SSE2 high-halves-to-full worker with the unpckhpd implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2906
2907
2908/* Opcode 0xf3 0x0f 0x15 - invalid */
2909/* Opcode 0xf2 0x0f 0x15 - invalid */
2910
2911/**
2912 * @opdone
2913 * @opmnemonic udf30f15
2914 * @opcode 0x15
2915 * @oppfx 0xf3
2916 * @opunused intel-modrm
2917 * @opcpuid sse
2918 * @optest ->
2919 * @opdone
2920 */
2921
2922/**
2923 * @opmnemonic udf20f15
2924 * @opcode 0x15
2925 * @oppfx 0xf2
2926 * @opunused intel-modrm
2927 * @opcpuid sse
2928 * @optest ->
2929 * @opdone
2930 */
2931
/* Opcode 0x0f 0x16: MOVLHPS (register form) / MOVHPS (memory form) share one encoding. */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVLHPS: low qword of source -> high qword of destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVHPS: memory qword -> high qword of destination (low qword preserved). */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2993
2994
2995/**
2996 * @opcode 0x16
2997 * @opcodesub !11 mr/reg
2998 * @oppfx 0x66
2999 * @opcpuid sse2
3000 * @opgroup og_sse2_pcksclr_datamove
3001 * @opxcpttype 5
3002 * @optest op1=1 op2=2 -> op1=2
3003 * @optest op1=0 op2=-42 -> op1=-42
3004 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory qword -> high qword of destination (low qword preserved). */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
3040
3041
3042/**
3043 * @opcode 0x16
3044 * @oppfx 0xf3
3045 * @opcpuid sse3
3046 * @opgroup og_sse3_pcksclr_datamove
3047 * @opxcpttype 4
3048 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3049 * op1=0x00000002000000020000000100000001
3050 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* The duplication itself is done by the assembly/C worker. */
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (SSE alignment-checked fetch). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3099
3100/**
3101 * @opdone
3102 * @opmnemonic udf30f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/**
3113 * @opcode 0x17
3114 * @opcodesub !11 mr/reg
3115 * @oppfx none
3116 * @opcpuid sse
3117 * @opgroup og_sse_simdfp_datamove
3118 * @opxcpttype 5
3119 * @optest op1=1 op2=2 -> op1=2
3120 * @optest op1=0 op2=-42 -> op1=-42
3121 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* High qword of the XMM register -> memory. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * [mem64], XMM - store the high quadword of the source register to memory.
         */
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* NOTE(review): this is an SSE2 instruction (see @opcpuid above) but uses the
           SSE exception check here - confirm this is intentional. */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reads the XMM register */

        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm)); /* bits 127:64 of Vq */
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form is not a valid encoding */
}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
3230/** Opcode 0x0f 0x18. */
/* Group 16: the /r reg field selects the prefetch hint; only the memory form is valid. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0,  "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1,  "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2,  "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, all 8 values covered */
        }

        /* Decode the effective address to get segment faults etc. right, but the
           prefetch hint itself is not emulated. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* register operand form is undefined */
}
3261
3262
3263/** Opcode 0x0f 0x19..0x1f. */
/* Multi-byte NOP (hint NOP): ignores its operand, but the memory form must still
   decode the effective address so addressing faults are delivered correctly. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: nothing to do at all. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: calculate the address (may fault) but don't access memory. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3287
3288
3289/** Opcode 0x0f 0x20. */
/* mov Rd,Cd - read a control register into a general register.
   Defers the actual work to iemCImpl_mov_Rd_Cd after validating the encoding. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Always operates at the native register width. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break; /* only these control registers are valid sources */
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3320
3321
3322/** Opcode 0x0f 0x21. */
/* mov Rd,Dd - read a debug register into a general register; work done in
   iemCImpl_mov_Rd_Dd. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15 which aren't valid -> #UD (presumably; confirm vs SDM). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3335
3336
3337/** Opcode 0x0f 0x22. */
/* mov Cd,Rd - write a general register into a control register.
   Mirrors iemOp_mov_Rd_Cd; the actual write is done by iemCImpl_mov_Cd_Rd. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Always operates at the native register width. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break; /* only these control registers are valid destinations */
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3368
3369
3370/** Opcode 0x0f 0x23. */
/* mov Dd,Rd - write a general register into a debug register; work done in
   iemCImpl_mov_Dd_Rd. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15 which aren't valid -> #UD (presumably; confirm vs SDM). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3383
3384
3385/** Opcode 0x0f 0x24. */
/* mov Rd,Td - read a test register (386/486 only); #UD on Pentium and later
   target CPUs, as checked below. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE(); /* test registers were dropped with the Pentium */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3398
3399
3400/** Opcode 0x0f 0x26. */
/* mov Td,Rd - write a test register (386/486 only); #UD on Pentium and later
   target CPUs, as checked below. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE(); /* test registers were dropped with the Pentium */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3413
3414
3415/**
3416 * @opcode 0x28
3417 * @oppfx none
3418 * @opcpuid sse
3419 * @opgroup og_sse_simdfp_datamove
3420 * @opxcpttype 1
3421 * @optest op1=1 op2=2 -> op1=2
3422 * @optest op1=0 op2=-42 -> op1=-42
3423 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain 128-bit register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: 128-bit aligned load (#GP on misalignment via the
         * _ALIGN_SSE fetch).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3463
3464/**
3465 * @opcode 0x28
3466 * @oppfx 66
3467 * @opcpuid sse2
3468 * @opgroup og_sse2_pcksclr_datamove
3469 * @opxcpttype 1
3470 * @optest op1=1 op2=2 -> op1=2
3471 * @optest op1=0 op2=-42 -> op1=-42
3472 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain 128-bit register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: 128-bit aligned load (#GP on misalignment via the
         * _ALIGN_SSE fetch).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3512
3513/* Opcode 0xf3 0x0f 0x28 - invalid */
3514/* Opcode 0xf2 0x0f 0x28 - invalid */
3515
3516/**
3517 * @opcode 0x29
3518 * @oppfx none
3519 * @opcpuid sse
3520 * @opgroup og_sse_simdfp_datamove
3521 * @opxcpttype 1
3522 * @optest op1=1 op2=2 -> op1=2
3523 * @optest op1=0 op2=-42 -> op1=-42
3524 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain 128-bit register copy (note: R/M is the
         * destination in this MR form).
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: 128-bit aligned store (#GP on misalignment via the
         * _ALIGN_SSE store).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reads the XMM register */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3564
3565/**
3566 * @opcode 0x29
3567 * @oppfx 66
3568 * @opcpuid sse2
3569 * @opgroup og_sse2_pcksclr_datamove
3570 * @opxcpttype 1
3571 * @optest op1=1 op2=2 -> op1=2
3572 * @optest op1=0 op2=-42 -> op1=-42
3573 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: plain 128-bit register copy (note: R/M is the
         * destination in this MR form).
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: 128-bit aligned store (#GP on misalignment via the
         * _ALIGN_SSE store).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reads the XMM register */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3613
3614/* Opcode 0xf3 0x0f 0x29 - invalid */
3615/* Opcode 0xf2 0x0f 0x29 - invalid */
3616
3617
3618/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
{
    IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(X86XMMREG,             Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,    pDst, Dst,          1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        /* NOTE(review): SSE2 exception check for an SSE instruction - see the
           @todo on the mnemonic above; confirm against the SDM. */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX source register -> enters MMX mode */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst); /* commit only if no #XF pending */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(X86XMMREG,             Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,    pDst, Dst,          1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        /* NOTE(review): unlike the register form, Dst is not pre-loaded from the
           destination XMM register here - verify the high quadword handling. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3685
3686
3687/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(X86XMMREG,             Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,    pDst, Dst,          1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX source register -> enters MMX mode */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst); /* commit only if no #XF pending */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(X86XMMREG,             Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,    pDst, Dst,          1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3753
3754
3755/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects a 64-bit integer source, otherwise 32-bit. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U,    r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U,       pr32Dst, r32Dst,    1);
            IEM_MC_ARG(const int64_t *,             pi64Src,            2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst); /* commit only if no #XF pending */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U,    r32Dst);
            IEM_MC_LOCAL(int64_t,       i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U,       pr32Dst, r32Dst,    1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *,   pi64Src, i64Src,    2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U,    r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U,       pr32Dst, r32Dst,    1);
            IEM_MC_ARG(const int32_t *,             pi32Src,            2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U,    r32Dst);
            IEM_MC_LOCAL(int32_t,       i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U,       pr32Dst, r32Dst,    1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *,   pi32Src, i32Src,    2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3878
3879
3880/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects a 64-bit integer source, otherwise 32-bit. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U,    r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,       pr64Dst, r64Dst,    1);
            IEM_MC_ARG(const int64_t *,             pi64Src,            2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst); /* commit only if no #XF pending */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U,    r64Dst);
            IEM_MC_LOCAL(int64_t,       i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,       pr64Dst, r64Dst,    1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *,   pi64Src, i64Src,    2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U,    r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,       pr64Dst, r64Dst,    1);
            IEM_MC_ARG(const int32_t *,             pi32Src,            2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t,      fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U,    r64Dst);
            IEM_MC_LOCAL(int32_t,       i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *,        pfMxcsr, fMxcsr,    0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,       pr64Dst, r64Dst,    1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *,   pi32Src, i32Src,    2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4003
4004
4005/**
4006 * @opcode 0x2b
4007 * @opcodesub !11 mr/reg
4008 * @oppfx none
4009 * @opcpuid sse
4010 * @opgroup og_sse1_cachect
4011 * @opxcpttype 1
4012 * @optest op1=1 op2=2 -> op1=2
4013 * @optest op1=0 op2=-42 -> op1=-42
4014 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register: 128-bit aligned store (the non-temporal hint is not
         * modelled here - emulated as a regular aligned store).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
4043
4044/**
4045 * @opcode 0x2b
4046 * @opcodesub !11 mr/reg
4047 * @oppfx 0x66
4048 * @opcpuid sse2
4049 * @opgroup og_sse2_cachect
4050 * @opxcpttype 1
4051 * @optest op1=1 op2=2 -> op1=2
4052 * @optest op1=0 op2=-42 -> op1=-42
4053 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register: 128-bit aligned store (the non-temporal hint is not
         * modelled here - emulated as a regular aligned store).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
4082/* Opcode 0xf3 0x0f 0x2b - invalid */
4083/* Opcode 0xf2 0x0f 0x2b - invalid */
4084
4085
4086/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: MMX destination <- low 64 bits of the XMM source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(uint64_t,              u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *,    pu64Dst, u64Dst,    1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        /* NOTE(review): SSE2 exception check for an SSE instruction - see the
           @todo on the mnemonic above; confirm against the SDM. */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX destination register -> enters MMX mode */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); /* commit only if no #XF pending */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: MMX destination <- two packed singles from memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *,              pfMxcsr,            0);
        IEM_MC_LOCAL(uint64_t,              u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *,    pu64Dst, u64Dst,    1);
        IEM_MC_ARG(uint64_t,                u64Src,             2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4152
4153
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd
 *
 * Converts the two packed double-precision values in Wpd to two signed
 * doubleword integers in the MMX register Ppi, truncating towards zero
 * irrespective of MXCSR.RC.  The result is only committed when no unmasked
 * SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);   /* whole 128-bit source (two doubles) */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();           /* Destination is an MMX register. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        /* Only commit the result when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);  /* address before decode-done */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Full 16-byte load with SSE alignment check. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4222
4223
4224/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4225FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4226{
4227 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4228
4229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4230 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4231 {
4232 if (IEM_IS_MODRM_REG_MODE(bRm))
4233 {
4234 /* greg64, XMM */
4235 IEM_MC_BEGIN(3, 2);
4236 IEM_MC_LOCAL(uint32_t, fMxcsr);
4237 IEM_MC_LOCAL(int64_t, i64Dst);
4238 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4239 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4240 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4241
4242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4243 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4244 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4245
4246 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4247 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4248 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4249 IEM_MC_IF_MXCSR_XCPT_PENDING()
4250 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4251 IEM_MC_ELSE()
4252 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4253 IEM_MC_ENDIF();
4254
4255 IEM_MC_ADVANCE_RIP_AND_FINISH();
4256 IEM_MC_END();
4257 }
4258 else
4259 {
4260 /* greg64, [mem64] */
4261 IEM_MC_BEGIN(3, 4);
4262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4263 IEM_MC_LOCAL(uint32_t, fMxcsr);
4264 IEM_MC_LOCAL(int64_t, i64Dst);
4265 IEM_MC_LOCAL(uint32_t, u32Src);
4266 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4267 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4268 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4269
4270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4273 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4274
4275 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4276 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4277 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4278 IEM_MC_IF_MXCSR_XCPT_PENDING()
4279 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4280 IEM_MC_ELSE()
4281 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4282 IEM_MC_ENDIF();
4283
4284 IEM_MC_ADVANCE_RIP_AND_FINISH();
4285 IEM_MC_END();
4286 }
4287 }
4288 else
4289 {
4290 if (IEM_IS_MODRM_REG_MODE(bRm))
4291 {
4292 /* greg, XMM */
4293 IEM_MC_BEGIN(3, 2);
4294 IEM_MC_LOCAL(uint32_t, fMxcsr);
4295 IEM_MC_LOCAL(int32_t, i32Dst);
4296 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4297 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4298 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4299
4300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4301 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4302 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4303
4304 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4305 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4306 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4307 IEM_MC_IF_MXCSR_XCPT_PENDING()
4308 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4309 IEM_MC_ELSE()
4310 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4311 IEM_MC_ENDIF();
4312
4313 IEM_MC_ADVANCE_RIP_AND_FINISH();
4314 IEM_MC_END();
4315 }
4316 else
4317 {
4318 /* greg, [mem] */
4319 IEM_MC_BEGIN(3, 4);
4320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4321 IEM_MC_LOCAL(uint32_t, fMxcsr);
4322 IEM_MC_LOCAL(int32_t, i32Dst);
4323 IEM_MC_LOCAL(uint32_t, u32Src);
4324 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4325 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4326 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4327
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4331 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4332
4333 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4334 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4335 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4336 IEM_MC_IF_MXCSR_XCPT_PENDING()
4337 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4338 IEM_MC_ELSE()
4339 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4340 IEM_MC_ENDIF();
4341
4342 IEM_MC_ADVANCE_RIP_AND_FINISH();
4343 IEM_MC_END();
4344 }
4345 }
4346}
4347
4348
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd
 *
 * Converts the scalar double-precision value in Wsd to a signed integer in
 * the general register Gy (64-bit with REX.W, otherwise 32-bit), truncating
 * towards zero irrespective of MXCSR.RC.  The result is only committed when
 * no unmasked SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr); /* merge local MXCSR copy back */
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                /* 32-bit GREG store; zero-extends into the upper half in 64-bit mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4472
4473
4474/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4475FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4476{
4477 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4479 if (IEM_IS_MODRM_REG_MODE(bRm))
4480 {
4481 /*
4482 * Register, register.
4483 */
4484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4485
4486 IEM_MC_BEGIN(3, 1);
4487 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4488 IEM_MC_LOCAL(uint64_t, u64Dst);
4489 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4490 IEM_MC_ARG(uint64_t, u64Src, 2);
4491 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4492 IEM_MC_PREPARE_FPU_USAGE();
4493 IEM_MC_FPU_TO_MMX_MODE();
4494
4495 IEM_MC_REF_MXCSR(pfMxcsr);
4496 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4497
4498 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4499 IEM_MC_IF_MXCSR_XCPT_PENDING()
4500 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4501 IEM_MC_ELSE()
4502 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4503 IEM_MC_ENDIF();
4504
4505 IEM_MC_ADVANCE_RIP_AND_FINISH();
4506 IEM_MC_END();
4507 }
4508 else
4509 {
4510 /*
4511 * Register, memory.
4512 */
4513 IEM_MC_BEGIN(3, 2);
4514 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4515 IEM_MC_LOCAL(uint64_t, u64Dst);
4516 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4517 IEM_MC_ARG(uint64_t, u64Src, 2);
4518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4519
4520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4522 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4523 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4524
4525 IEM_MC_PREPARE_FPU_USAGE();
4526 IEM_MC_FPU_TO_MMX_MODE();
4527 IEM_MC_REF_MXCSR(pfMxcsr);
4528
4529 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4530 IEM_MC_IF_MXCSR_XCPT_PENDING()
4531 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4532 IEM_MC_ELSE()
4533 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4534 IEM_MC_ENDIF();
4535
4536 IEM_MC_ADVANCE_RIP_AND_FINISH();
4537 IEM_MC_END();
4538 }
4539}
4540
4541
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd
 *
 * Converts the two packed double-precision values in Wpd to two signed
 * doubleword integers in the MMX register Ppi, rounding according to
 * MXCSR.RC.  The result is only committed when no unmasked SIMD FP
 * exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);   /* whole 128-bit source (two doubles) */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();           /* Destination is an MMX register. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        /* Only commit the result when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);  /* address before decode-done */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Full 16-byte load with SSE alignment check. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4610
4611
4612/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4613FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4614{
4615 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4616
4617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4618 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4619 {
4620 if (IEM_IS_MODRM_REG_MODE(bRm))
4621 {
4622 /* greg64, XMM */
4623 IEM_MC_BEGIN(3, 2);
4624 IEM_MC_LOCAL(uint32_t, fMxcsr);
4625 IEM_MC_LOCAL(int64_t, i64Dst);
4626 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4627 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4628 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4629
4630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4631 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4632 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4633
4634 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4635 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4636 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4637 IEM_MC_IF_MXCSR_XCPT_PENDING()
4638 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4639 IEM_MC_ELSE()
4640 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4641 IEM_MC_ENDIF();
4642
4643 IEM_MC_ADVANCE_RIP_AND_FINISH();
4644 IEM_MC_END();
4645 }
4646 else
4647 {
4648 /* greg64, [mem64] */
4649 IEM_MC_BEGIN(3, 4);
4650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4651 IEM_MC_LOCAL(uint32_t, fMxcsr);
4652 IEM_MC_LOCAL(int64_t, i64Dst);
4653 IEM_MC_LOCAL(uint32_t, u32Src);
4654 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4655 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4656 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4657
4658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4661 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4662
4663 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4664 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4665 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4666 IEM_MC_IF_MXCSR_XCPT_PENDING()
4667 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4668 IEM_MC_ELSE()
4669 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4670 IEM_MC_ENDIF();
4671
4672 IEM_MC_ADVANCE_RIP_AND_FINISH();
4673 IEM_MC_END();
4674 }
4675 }
4676 else
4677 {
4678 if (IEM_IS_MODRM_REG_MODE(bRm))
4679 {
4680 /* greg, XMM */
4681 IEM_MC_BEGIN(3, 2);
4682 IEM_MC_LOCAL(uint32_t, fMxcsr);
4683 IEM_MC_LOCAL(int32_t, i32Dst);
4684 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4685 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4686 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4687
4688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4689 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4690 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4691
4692 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4693 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4694 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4695 IEM_MC_IF_MXCSR_XCPT_PENDING()
4696 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4697 IEM_MC_ELSE()
4698 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4699 IEM_MC_ENDIF();
4700
4701 IEM_MC_ADVANCE_RIP_AND_FINISH();
4702 IEM_MC_END();
4703 }
4704 else
4705 {
4706 /* greg, [mem] */
4707 IEM_MC_BEGIN(3, 4);
4708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4709 IEM_MC_LOCAL(uint32_t, fMxcsr);
4710 IEM_MC_LOCAL(int32_t, i32Dst);
4711 IEM_MC_LOCAL(uint32_t, u32Src);
4712 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4713 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4714 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4715
4716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4718 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4719 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4720
4721 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4722 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4723 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4724 IEM_MC_IF_MXCSR_XCPT_PENDING()
4725 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4726 IEM_MC_ELSE()
4727 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4728 IEM_MC_ENDIF();
4729
4730 IEM_MC_ADVANCE_RIP_AND_FINISH();
4731 IEM_MC_END();
4732 }
4733 }
4734}
4735
4736
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd
 *
 * Converts the scalar double-precision value in Wsd to a signed integer in
 * the general register Gy (64-bit with REX.W, otherwise 32-bit), rounding
 * according to MXCSR.RC.  The result is only committed when no unmasked
 * SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr); /* merge local MXCSR copy back */
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                /* 32-bit GREG store; zero-extends into the upper half in 64-bit mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4860
4861
/** Opcode 0x0f 0x2e - ucomiss Vss, Wss
 *
 * Unordered compare of the scalar single-precision values in Vss and Wss,
 * setting ZF/PF/CF (and clearing OF/SF/AF) accordingly.  EFLAGS are only
 * committed when no unmasked SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);   /* worker updates a local flags copy */
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Only commit EFLAGS when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar operand: only dword 0 of the local XMM copy is loaded. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4929
4930
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd
 *
 * Unordered compare of the scalar double-precision values in Vsd and Wsd,
 * setting ZF/PF/CF (and clearing OF/SF/AF) accordingly.  EFLAGS are only
 * committed when no unmasked SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);   /* worker updates a local flags copy */
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Only commit EFLAGS when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Scalar operand: only qword 0 of the local XMM copy is loaded. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4998
4999
5000/* Opcode 0xf3 0x0f 0x2e - invalid */
5001/* Opcode 0xf2 0x0f 0x2e - invalid */
5002
5003
/** Opcode 0x0f 0x2f - comiss Vss, Wss
 *
 * Ordered compare of the scalar single-precision values in Vss and Wss,
 * setting ZF/PF/CF (and clearing OF/SF/AF) accordingly.  Unlike ucomiss the
 * worker also signals #I on QNaN operands; EFLAGS are only committed when
 * no unmasked SIMD FP exception is pending afterwards.
 */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);   /* worker updates a local flags copy */
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Only commit EFLAGS when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar operand: only dword 0 of the local XMM copy is loaded. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5071
5072
5073/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd
5074 * Ordered compare of scalar double-precision values; SSE2 variant of comiss.
5075 * EFLAGS result is only committed when no MXCSR exception is pending. */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5075{
5076    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5077    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5078    if (IEM_IS_MODRM_REG_MODE(bRm))
5079    {
5080        /*
5081         * Register, register.
5082         */
5083        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5084        IEM_MC_BEGIN(4, 1);
5085        IEM_MC_LOCAL(uint32_t, fEFlags);
5086        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5087        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5088        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5089        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5090        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5091        IEM_MC_PREPARE_SSE_USAGE();
5092        IEM_MC_FETCH_EFLAGS(fEFlags);
5093        IEM_MC_REF_MXCSR(pfMxcsr);
5094        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5095        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5096        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Raise #XM/#UD instead of committing EFLAGS if an MXCSR exception is pending. */
5097        IEM_MC_IF_MXCSR_XCPT_PENDING()
5098            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5099        IEM_MC_ELSE()
5100            IEM_MC_COMMIT_EFLAGS(fEFlags);
5101        IEM_MC_ENDIF();
5102
5103        IEM_MC_ADVANCE_RIP_AND_FINISH();
5104        IEM_MC_END();
5105    }
5106    else
5107    {
5108        /*
5109         * Register, memory.
5110         */
5111        IEM_MC_BEGIN(4, 3);
5112        IEM_MC_LOCAL(uint32_t, fEFlags);
5113        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5114        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5115        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5116        IEM_MC_LOCAL(X86XMMREG, uSrc2);
5117        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5118        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5119
5120        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5121        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5122        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Scalar double: only a qword of the source is fetched from memory. */
5123        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5124
5125        IEM_MC_PREPARE_SSE_USAGE();
5126        IEM_MC_FETCH_EFLAGS(fEFlags);
5127        IEM_MC_REF_MXCSR(pfMxcsr);
5128        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5129        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5130        IEM_MC_IF_MXCSR_XCPT_PENDING()
5131            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5132        IEM_MC_ELSE()
5133            IEM_MC_COMMIT_EFLAGS(fEFlags);
5134        IEM_MC_ENDIF();
5135
5136        IEM_MC_ADVANCE_RIP_AND_FINISH();
5137        IEM_MC_END();
5138    }
5139}
5140
5141
5142/* Opcode 0xf3 0x0f 0x2f - invalid */
5143/* Opcode 0xf2 0x0f 0x2f - invalid */
5144
5145/** Opcode 0x0f 0x30 - wrmsr. Decode only; execution is deferred to iemCImpl_wrmsr. */
5146FNIEMOP_DEF(iemOp_wrmsr)
5147{
5148    IEMOP_MNEMONIC(wrmsr, "wrmsr");
5149    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5150    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5151}
5152
5153
5154/** Opcode 0x0f 0x31 - rdtsc. Decode only; execution is deferred to iemCImpl_rdtsc. */
5155FNIEMOP_DEF(iemOp_rdtsc)
5156{
5157    IEMOP_MNEMONIC(rdtsc, "rdtsc");
5158    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5160}
5161
5162
5163/** Opcode 0x0f 0x32 - rdmsr. Decode only; execution is deferred to iemCImpl_rdmsr.
5164 * NOTE(review): comment previously said 0x0f 0x33, but RDMSR is 0F 32. */
5165FNIEMOP_DEF(iemOp_rdmsr)
5166{
5167    IEMOP_MNEMONIC(rdmsr, "rdmsr");
5168    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5169    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5170}
5170
5171
5172/** Opcode 0x0f 0x33 - rdpmc. Decode only; execution is deferred to iemCImpl_rdpmc.
5173 * NOTE(review): comment previously said 0x0f 0x34, but RDPMC is 0F 33. */
5174FNIEMOP_DEF(iemOp_rdpmc)
5175{
5176    IEMOP_MNEMONIC(rdpmc, "rdpmc");
5177    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5178    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5179}
5179
5180
5181/** Opcode 0x0f 0x34 - sysenter. Decode only; execution is deferred to iemCImpl_sysenter. */
5182FNIEMOP_DEF(iemOp_sysenter)
5183{
5184    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5185    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5187}
5188
5189/** Opcode 0x0f 0x35 - sysexit. Decode only; deferred to iemCImpl_sysexit, which
5190 * takes the effective operand size (return to 32- vs 64-bit code). */
5191FNIEMOP_DEF(iemOp_sysexit)
5192{
5193    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5194    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5196}
5196
5197/** Opcode 0x0f 0x37 - getsec. Not implemented yet (stub). */
5198FNIEMOP_STUB(iemOp_getsec);
5199
5200
5201/** Opcode 0x0f 0x38 - escape into the three-byte 0F 38 opcode map. */
5202FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5203{
5204#ifdef IEM_WITH_THREE_0F_38
5205    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four table entries per opcode byte, selected by the active prefix index. */
5206    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5207#else
5208    IEMOP_BITCH_ABOUT_STUB();
5209    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5210#endif
5211}
5212
5213
5214/** Opcode 0x0f 0x3a - escape into the three-byte 0F 3A opcode map. */
5215FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5216{
5217#ifdef IEM_WITH_THREE_0F_3A
5218    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four table entries per opcode byte, selected by the active prefix index. */
5219    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5220#else
5221    IEMOP_BITCH_ABOUT_STUB();
5222    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5223#endif
5224}
5225
5226
5227/**
5228 * Implements a conditional move.
5229 *
5230 * Wish there was an obvious way to do this where we could share and reduce
5231 * code bloat.
5232 *
 * Notes:
 *  - In 32-bit operand size the high half of the 64-bit destination register
 *    is cleared even when the condition is false (see the IEM_MC_ELSE branch).
 *  - The memory form performs the memory fetch unconditionally, before the
 *    condition is evaluated.
 *
5233 * @param   a_Cnd       The conditional "microcode" operation.
5234 */
5235#define CMOV_X(a_Cnd) \
5236    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5237    if (IEM_IS_MODRM_REG_MODE(bRm)) \
5238    { \
5239        switch (pVCpu->iem.s.enmEffOpSize) \
5240        { \
5241            case IEMMODE_16BIT: \
5242                IEM_MC_BEGIN(0, 1); \
5243                IEM_MC_LOCAL(uint16_t, u16Tmp); \
5244                a_Cnd { \
5245                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5246                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5247                } IEM_MC_ENDIF(); \
5248                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5249                IEM_MC_END(); \
5250                break; \
5251            \
5252            case IEMMODE_32BIT: \
5253                IEM_MC_BEGIN(0, 1); \
5254                IEM_MC_LOCAL(uint32_t, u32Tmp); \
5255                a_Cnd { \
5256                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5257                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5258                } IEM_MC_ELSE() { \
5259                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5260                } IEM_MC_ENDIF(); \
5261                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5262                IEM_MC_END(); \
5263                break; \
5264            \
5265            case IEMMODE_64BIT: \
5266                IEM_MC_BEGIN(0, 1); \
5267                IEM_MC_LOCAL(uint64_t, u64Tmp); \
5268                a_Cnd { \
5269                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5270                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5271                } IEM_MC_ENDIF(); \
5272                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5273                IEM_MC_END(); \
5274                break; \
5275            \
5276            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5277        } \
5278    } \
5279    else \
5280    { \
5281        switch (pVCpu->iem.s.enmEffOpSize) \
5282        { \
5283            case IEMMODE_16BIT: \
5284                IEM_MC_BEGIN(0, 2); \
5285                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5286                IEM_MC_LOCAL(uint16_t, u16Tmp); \
5287                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5288                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5289                a_Cnd { \
5290                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5291                } IEM_MC_ENDIF(); \
5292                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5293                IEM_MC_END(); \
5294                break; \
5295            \
5296            case IEMMODE_32BIT: \
5297                IEM_MC_BEGIN(0, 2); \
5298                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5299                IEM_MC_LOCAL(uint32_t, u32Tmp); \
5300                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5301                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5302                a_Cnd { \
5303                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5304                } IEM_MC_ELSE() { \
5305                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5306                } IEM_MC_ENDIF(); \
5307                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5308                IEM_MC_END(); \
5309                break; \
5310            \
5311            case IEMMODE_64BIT: \
5312                IEM_MC_BEGIN(0, 2); \
5313                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5314                IEM_MC_LOCAL(uint64_t, u64Tmp); \
5315                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5316                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5317                a_Cnd { \
5318                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5319                } IEM_MC_ENDIF(); \
5320                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5321                IEM_MC_END(); \
5322                break; \
5323            \
5324            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5325        } \
5326    } do {} while (0)
5327
5328
5329
5330/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
5331FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5332{
5333    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5334    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5335}
5336
5337
5338/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
5339FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5340{
5341    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5342    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5343}
5344
5345
5346/** Opcode 0x0f 0x42 - cmovc Gv,Ev (move if CF=1). */
5347FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5348{
5349    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5350    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5351}
5352
5353
5354/** Opcode 0x0f 0x43 - cmovnc Gv,Ev (move if CF=0). */
5355FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5356{
5357    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5358    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5359}
5360
5361
5362/** Opcode 0x0f 0x44 - cmove Gv,Ev (move if ZF=1). */
5363FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5364{
5365    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5366    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5367}
5368
5369
5370/** Opcode 0x0f 0x45 - cmovne Gv,Ev (move if ZF=0). */
5371FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5372{
5373    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5374    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5375}
5376
5377
5378/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF=1 or ZF=1). */
5379FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5380{
5381    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5382    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5383}
5384
5385
5386/** Opcode 0x0f 0x47 - cmovnbe Gv,Ev (move if CF=0 and ZF=0). */
5387FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5388{
5389    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5390    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5391}
5392
5393
5394/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
5395FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5396{
5397    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5398    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5399}
5400
5401
5402/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
5403FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5404{
5405    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5406    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5407}
5408
5409
5410/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF=1). */
5411FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5412{
5413    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5414    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5415}
5416
5417
5418/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF=0). */
5419FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5420{
5421    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5422    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5423}
5424
5425
5426/** Opcode 0x0f 0x4c - cmovl Gv,Ev (move if SF!=OF). */
5427FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5428{
5429    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5430    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5431}
5432
5433
5434/** Opcode 0x0f 0x4d - cmovnl Gv,Ev (move if SF==OF). */
5435FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5436{
5437    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5438    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5439}
5440
5441
5442/** Opcode 0x0f 0x4e - cmovle Gv,Ev (move if ZF=1 or SF!=OF). */
5443FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5444{
5445    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5446    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5447}
5448
5449
5450/** Opcode 0x0f 0x4f - cmovnle Gv,Ev (move if ZF=0 and SF==OF). */
5451FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5452{
5453    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5454    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5455}
5456
5457#undef CMOV_X
5458
5459/** Opcode 0x0f 0x50 - movmskps Gy, Ups
5460 * Extracts a sign-bit mask from the XMM source into the low bits of a GPR;
5461 * register form only - a memory r/m encoding raises #UD. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5461{
5462    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5463    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5464    if (IEM_IS_MODRM_REG_MODE(bRm))
5465    {
5466        /*
5467         * Register, register.
5468         */
5469        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5470        IEM_MC_BEGIN(2, 1);
5471        IEM_MC_LOCAL(uint8_t, u8Dst);
5472        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5473        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5474        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5475        IEM_MC_PREPARE_SSE_USAGE();
5476        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5477        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        /* 32-bit GPR store zero-extends the 8-bit mask (and clears the high qword half). */
5478        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5479        IEM_MC_ADVANCE_RIP_AND_FINISH();
5480        IEM_MC_END();
5481    }
5482    /* No memory operand. */
5483    else
5484        return IEMOP_RAISE_INVALID_OPCODE();
5485}
5486
5487
5488/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd
5489 * SSE2 variant of movmskps; register form only - memory r/m raises #UD. */
FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5490{
5491    IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5492    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5493    if (IEM_IS_MODRM_REG_MODE(bRm))
5494    {
5495        /*
5496         * Register, register.
5497         */
5498        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5499        IEM_MC_BEGIN(2, 1);
5500        IEM_MC_LOCAL(uint8_t, u8Dst);
5501        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5502        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5503        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5504        IEM_MC_PREPARE_SSE_USAGE();
5505        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5506        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5507        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5508        IEM_MC_ADVANCE_RIP_AND_FINISH();
5509        IEM_MC_END();
5510    }
5511    /* No memory operand. */
5512    else
5513        return IEMOP_RAISE_INVALID_OPCODE();
5514
5515}
5516
5517
5518/* Opcode 0xf3 0x0f 0x50 - invalid */
5519/* Opcode 0xf2 0x0f 0x50 - invalid */
5520
5521
/* SSE/SSE2 square-root family: each wrapper only decodes the mnemonic and
   forwards to the shared full/scalar worker with the matching assembly helper. */
5522/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5523FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5524{
5525    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5526    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5527}
5528
5529
5530/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5531FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5532{
5533    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5534    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5535}
5536
5537
5538/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5539FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5540{
5541    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5542    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5543}
5544
5545
5546/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5547FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5548{
5549    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5550    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5551}
5552
5553
5554/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps (stub, not implemented yet) */
5555FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5556/* Opcode 0x66 0x0f 0x52 - invalid */
5557/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss (stub, not implemented yet) */
5558FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5559/* Opcode 0xf2 0x0f 0x52 - invalid */
5560
5561/** Opcode 0x0f 0x53 - rcpps Vps, Wps (stub, not implemented yet) */
5562FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5563/* Opcode 0x66 0x0f 0x53 - invalid */
5564/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss (stub, not implemented yet) */
5565FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5566/* Opcode 0xf2 0x0f 0x53 - invalid */
5567
5568
/* Packed logical ops (and/andn/or/xor): the ps and pd forms are bitwise
   identical, so both dispatch to the integer pand/pandn/por/pxor helpers;
   only the CPUID check differs (SSE vs SSE2 worker). */
5569/** Opcode 0x0f 0x54 - andps Vps, Wps */
5570FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5571{
5572    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5573    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5574}
5575
5576
5577/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5578FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5579{
5580    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5581    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5582}
5583
5584
5585/* Opcode 0xf3 0x0f 0x54 - invalid */
5586/* Opcode 0xf2 0x0f 0x54 - invalid */
5587
5588
5589/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5590FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5591{
5592    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5593    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5594}
5595
5596
5597/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5598FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5599{
5600    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5601    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5602}
5603
5604
5605/* Opcode 0xf3 0x0f 0x55 - invalid */
5606/* Opcode 0xf2 0x0f 0x55 - invalid */
5607
5608
5609/** Opcode 0x0f 0x56 - orps Vps, Wps */
5610FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5611{
5612    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5613    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5614}
5615
5616
5617/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5618FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5619{
5620    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5621    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5622}
5623
5624
5625/* Opcode 0xf3 0x0f 0x56 - invalid */
5626/* Opcode 0xf2 0x0f 0x56 - invalid */
5627
5628
5629/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5630FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5631{
5632    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5633    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5634}
5635
5636
5637/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5638FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5639{
5640    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5641    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5642}
5643
5644
5645/* Opcode 0xf3 0x0f 0x57 - invalid */
5646/* Opcode 0xf2 0x0f 0x57 - invalid */
5647
/* Packed/scalar FP add and multiply: decode-only wrappers forwarding to the
   shared FullFull (packed) or FullR32/FullR64 (scalar) FP workers. */
5648/** Opcode 0x0f 0x58 - addps Vps, Wps */
5649FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5650{
5651    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5652    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5653}
5654
5655
5656/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5657FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5658{
5659    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5660    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5661}
5662
5663
5664/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5665FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5666{
5667    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5668    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5669}
5670
5671
5672/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5673FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5674{
5675    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5676    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5677}
5678
5679
5680/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5681FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5682{
5683    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5684    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5685}
5686
5687
5688/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5689FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5690{
5691    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5692    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5693}
5694
5695
5696/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5697FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5698{
5699    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5700    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5701}
5702
5703
5704/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5705FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5706{
5707    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5708    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5709}
5710
5711
/* FP conversion family (0x5a/0x5b): decode-only wrappers into the shared workers. */
5712/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5713FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5714{
5715    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5716    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5717}
5718
5719
5720/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5721FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5722{
5723    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5724    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5725}
5726
5727
5728/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5729FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5730{
5731    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5732    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5733}
5734
5735
5736/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5737FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5738{
5739    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5740    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5741}
5742
5743
5744/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5745FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5746{
5747    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5748    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5749}
5750
5751
5752/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5753FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5754{
5755    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5756    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5757}
5758
5759
5760/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps (truncating conversion) */
5761FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5762{
5763    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5764    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5765}
5766
5767
5768/* Opcode 0xf2 0x0f 0x5b - invalid */
5769
5770
/* Packed/scalar FP subtract and minimum: decode-only wrappers into the shared workers. */
5771/** Opcode 0x0f 0x5c - subps Vps, Wps */
5772FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5773{
5774    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5775    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5776}
5777
5778
5779/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5780FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5781{
5782    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5783    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5784}
5785
5786
5787/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5788FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5789{
5790    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5791    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5792}
5793
5794
5795/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5796FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5797{
5798    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5799    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5800}
5801
5802
5803/** Opcode 0x0f 0x5d - minps Vps, Wps */
5804FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5805{
5806    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5807    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5808}
5809
5810
5811/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5812FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5813{
5814    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5815    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5816}
5817
5818
5819/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5820FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5821{
5822    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5823    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5824}
5825
5826
5827/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5828FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5829{
5830    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5831    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5832}
5833
5834
/* Packed/scalar FP divide and maximum: decode-only wrappers into the shared workers. */
5835/** Opcode 0x0f 0x5e - divps Vps, Wps */
5836FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5837{
5838    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5839    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5840}
5841
5842
5843/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5844FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5845{
5846    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5847    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5848}
5849
5850
5851/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5852FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5853{
5854    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5855    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5856}
5857
5858
5859/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5860FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5861{
5862    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5863    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5864}
5865
5866
5867/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5868FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5869{
5870    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5871    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5872}
5873
5874
5875/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5876FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5877{
5878    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5879    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5880}
5881
5882
5883/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5884FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5885{
5886    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5887    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5888}
5889
5890
5891/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5892FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5893{
5894    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5895    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5896}
5897
5898
/* Low-lane unpack/interleave (punpckl*): MMX forms use the LowLow MMX worker,
   0x66-prefixed forms the SSE2 LowLow worker. */
5899/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5900FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5901{
5902    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5903    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5904}
5905
5906
5907/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
5908FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5909{
5910    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5911    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5912}
5913
5914
5915/* Opcode 0xf3 0x0f 0x60 - invalid */
5916
5917
5918/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5919FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5920{
5921    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
5922    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5923    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5924}
5925
5926
5927/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5928FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5929{
5930    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5931    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5932}
5933
5934
5935/* Opcode 0xf3 0x0f 0x61 - invalid */
5936
5937
5938/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5939FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5940{
5941    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5942    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5943}
5944
5945
5946/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5947FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5948{
5949    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5950    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5951}
5952
5953
5954/* Opcode 0xf3 0x0f 0x62 - invalid */
5955
5956
5957
/* packsswb and pcmpgt{b,w,d}: decode-only wrappers. The pack forms use the
   "Opt" workers; compares use the plain FullFull workers. */
5958/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5959FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5960{
5961    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5962    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5963}
5964
5965
5966/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5967FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5968{
5969    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5970    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5971}
5972
5973
5974/* Opcode 0xf3 0x0f 0x63 - invalid */
5975
5976
5977/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5978FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5979{
5980    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5981    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5982}
5983
5984
5985/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5986FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5987{
5988    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5989    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5990}
5991
5992
5993/* Opcode 0xf3 0x0f 0x64 - invalid */
5994
5995
5996/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5997FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5998{
5999    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6000    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6001}
6002
6003
6004/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6005FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6006{
6007    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6008    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6009}
6010
6011
6012/* Opcode 0xf3 0x0f 0x65 - invalid */
6013
6014
6015/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6016FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6017{
6018    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6019    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6020}
6021
6022
6023/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6024FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6025{
6026    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6027    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6028}
6029
6030
6031/* Opcode 0xf3 0x0f 0x66 - invalid */
6032
6033
6034/** Opcode 0x0f 0x67 - packuswb Pq, Qq (MMX; uses the Opt MMX worker) */
6035FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6036{
6037    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6038    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6039}
6040
6041
6042/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx (SSE2; uses the Opt SSE2 worker) */
6043FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6044{
6045    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6046    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6047}
6048
6049
6050/* Opcode 0xf3 0x0f 0x67 - invalid */
6051
6052
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    /* Defers to the common MMX high,high worker with the 64-bit punpckhbw helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
6062
6063
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    /* Defers to the common SSE2 high,high worker with the 128-bit punpckhbw helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
6070
6071
6072/* Opcode 0xf3 0x0f 0x68 - invalid */
6073
6074
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    /* Defers to the common MMX high,high worker with the 64-bit punpckhwd helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
6084
6085
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * @note The 'Hx' operand only exists on the VEX-encoded (vpunpckhwd) form;
 *       this legacy SSE2 form takes just Vx, Wx as the mnemonic below shows. */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    /* Defers to the common SSE2 high,high worker with the 128-bit punpckhwd helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
6093
6094
6095/* Opcode 0xf3 0x0f 0x69 - invalid */
6096
6097
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    /* Defers to the common MMX high,high worker with the 64-bit punpckhdq helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
6107
6108
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    /* Defers to the common SSE2 high,high worker with the 128-bit punpckhdq helper. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6115
6116
6117/* Opcode 0xf3 0x0f 0x6a - invalid */
6118
6119
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    /* Uses the "Opt" MMX worker variant with the 64-bit packssdw helper. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
6126
6127
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    /* Uses the "Opt" SSE2 worker variant with the 128-bit packssdw helper. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6134
6135
6136/* Opcode 0xf3 0x0f 0x6b - invalid */
6137
6138
6139/* Opcode 0x0f 0x6c - invalid */
6140
6141
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* SSE2 only (no MMX form of this opcode); defers to the common low,low worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6148
6149
6150/* Opcode 0xf3 0x0f 0x6c - invalid */
6151/* Opcode 0xf2 0x0f 0x6c - invalid */
6152
6153
6154/* Opcode 0x0f 0x6d - invalid */
6155
6156
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    /* SSE2 only (no MMX form of this opcode); defers to the common high,high worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6163
6164
6165/* Opcode 0xf3 0x0f 0x6d - invalid */
6166
6167
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq: general register or memory to
 *  MMX register.  REX.W selects the 64-bit (movq) form; without it the 32-bit
 *  source is zero-extended into the 64-bit MMX register. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            /* NOTE(review): the _8 register accessor presumably ignores REX.R
               since there are only eight MMX registers -- confirm macro. */
            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address must be calculated before decoding completes. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg: 32-bit source zero-extended to 64 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: 32-bit load, zero-extending store. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6274
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq: general register or memory
 *  to XMM register.  REX.W selects the 64-bit (movq) form; either way the
 *  destination XMM register's upper bits are zeroed. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address must be calculated before decoding completes. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6377
6378/* Opcode 0xf3 0x0f 0x6e - invalid */
6379
6380
6381/**
6382 * @opcode 0x6f
6383 * @oppfx none
6384 * @opcpuid mmx
6385 * @opgroup og_mmx_datamove
6386 * @opxcpttype 5
6387 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6388 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6389 */
6390FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6391{
6392 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6394 if (IEM_IS_MODRM_REG_MODE(bRm))
6395 {
6396 /*
6397 * Register, register.
6398 */
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_BEGIN(0, 1);
6401 IEM_MC_LOCAL(uint64_t, u64Tmp);
6402
6403 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6404 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6405 IEM_MC_FPU_TO_MMX_MODE();
6406
6407 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6408 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6409
6410 IEM_MC_ADVANCE_RIP_AND_FINISH();
6411 IEM_MC_END();
6412 }
6413 else
6414 {
6415 /*
6416 * Register, memory.
6417 */
6418 IEM_MC_BEGIN(0, 2);
6419 IEM_MC_LOCAL(uint64_t, u64Tmp);
6420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6421
6422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6426 IEM_MC_FPU_TO_MMX_MODE();
6427
6428 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6429 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6430
6431 IEM_MC_ADVANCE_RIP_AND_FINISH();
6432 IEM_MC_END();
6433 }
6434}
6435
6436/**
6437 * @opcode 0x6f
6438 * @oppfx 0x66
6439 * @opcpuid sse2
6440 * @opgroup og_sse2_simdint_datamove
6441 * @opxcpttype 1
6442 * @optest op1=1 op2=2 -> op1=2
6443 * @optest op1=0 op2=-42 -> op1=-42
6444 */
6445FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6446{
6447 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6449 if (IEM_IS_MODRM_REG_MODE(bRm))
6450 {
6451 /*
6452 * Register, register.
6453 */
6454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6455 IEM_MC_BEGIN(0, 0);
6456
6457 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6458 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6459
6460 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6461 IEM_GET_MODRM_RM(pVCpu, bRm));
6462 IEM_MC_ADVANCE_RIP_AND_FINISH();
6463 IEM_MC_END();
6464 }
6465 else
6466 {
6467 /*
6468 * Register, memory.
6469 */
6470 IEM_MC_BEGIN(0, 2);
6471 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6473
6474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6478
6479 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6480 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6481
6482 IEM_MC_ADVANCE_RIP_AND_FINISH();
6483 IEM_MC_END();
6484 }
6485}
6486
6487/**
6488 * @opcode 0x6f
6489 * @oppfx 0xf3
6490 * @opcpuid sse2
6491 * @opgroup og_sse2_simdint_datamove
6492 * @opxcpttype 4UA
6493 * @optest op1=1 op2=2 -> op1=2
6494 * @optest op1=0 op2=-42 -> op1=-42
6495 */
6496FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6497{
6498 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6500 if (IEM_IS_MODRM_REG_MODE(bRm))
6501 {
6502 /*
6503 * Register, register.
6504 */
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 IEM_MC_BEGIN(0, 0);
6507 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6508 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6509 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6510 IEM_GET_MODRM_RM(pVCpu, bRm));
6511 IEM_MC_ADVANCE_RIP_AND_FINISH();
6512 IEM_MC_END();
6513 }
6514 else
6515 {
6516 /*
6517 * Register, memory.
6518 */
6519 IEM_MC_BEGIN(0, 2);
6520 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6522
6523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6526 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6527 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6528 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6529
6530 IEM_MC_ADVANCE_RIP_AND_FINISH();
6531 IEM_MC_END();
6532 }
6533}
6534
6535
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);  /* the shuffle-control immediate */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* The immediate follows the ModR/M displacement, so it is fetched
           after the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6594
6595
6596/**
6597 * Common worker for SSE2 instructions on the forms:
6598 * pshufd xmm1, xmm2/mem128, imm8
6599 * pshufhw xmm1, xmm2/mem128, imm8
6600 * pshuflw xmm1, xmm2/mem128, imm8
6601 *
6602 * Proper alignment of the 128-bit operand is enforced.
6603 * Exceptions type 4. SSE2 cpuid checks.
6604 */
6605FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6606{
6607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6608 if (IEM_IS_MODRM_REG_MODE(bRm))
6609 {
6610 /*
6611 * Register, register.
6612 */
6613 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6615
6616 IEM_MC_BEGIN(3, 0);
6617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6619 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6620 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6621 IEM_MC_PREPARE_SSE_USAGE();
6622 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6624 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6625 IEM_MC_ADVANCE_RIP_AND_FINISH();
6626 IEM_MC_END();
6627 }
6628 else
6629 {
6630 /*
6631 * Register, memory.
6632 */
6633 IEM_MC_BEGIN(3, 2);
6634 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6635 IEM_MC_LOCAL(RTUINT128U, uSrc);
6636 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6638
6639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6640 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6641 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6644
6645 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6646 IEM_MC_PREPARE_SSE_USAGE();
6647 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6648 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6649
6650 IEM_MC_ADVANCE_RIP_AND_FINISH();
6651 IEM_MC_END();
6652 }
6653}
6654
6655
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* Defers to the shared pshufXX worker with the pshufd implementation. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6662
6663
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    /* Defers to the shared pshufXX worker with the pshufhw implementation. */
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6670
6671
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    /* Defers to the shared pshufXX worker with the pshuflw implementation. */
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6678
6679
6680/**
6681 * Common worker for MMX instructions of the form:
6682 * psrlw mm, imm8
6683 * psraw mm, imm8
6684 * psllw mm, imm8
6685 * psrld mm, imm8
6686 * psrad mm, imm8
6687 * pslld mm, imm8
6688 * psrlq mm, imm8
6689 * psllq mm, imm8
6690 *
6691 */
6692FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6693{
6694 if (IEM_IS_MODRM_REG_MODE(bRm))
6695 {
6696 /*
6697 * Register, immediate.
6698 */
6699 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6701
6702 IEM_MC_BEGIN(2, 0);
6703 IEM_MC_ARG(uint64_t *, pDst, 0);
6704 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6705 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6706 IEM_MC_PREPARE_FPU_USAGE();
6707 IEM_MC_FPU_TO_MMX_MODE();
6708
6709 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6710 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6711 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6712
6713 IEM_MC_ADVANCE_RIP_AND_FINISH();
6714 IEM_MC_END();
6715 }
6716 else
6717 {
6718 /*
6719 * Register, memory not supported.
6720 */
6721 /// @todo Caller already enforced register mode?!
6722 AssertFailedReturn(VINF_SUCCESS);
6723 }
6724}
6725
6726
6727/**
6728 * Common worker for SSE2 instructions of the form:
6729 * psrlw xmm, imm8
6730 * psraw xmm, imm8
6731 * psllw xmm, imm8
6732 * psrld xmm, imm8
6733 * psrad xmm, imm8
6734 * pslld xmm, imm8
6735 * psrlq xmm, imm8
6736 * psllq xmm, imm8
6737 *
6738 */
6739FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6740{
6741 if (IEM_IS_MODRM_REG_MODE(bRm))
6742 {
6743 /*
6744 * Register, immediate.
6745 */
6746 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6748
6749 IEM_MC_BEGIN(2, 0);
6750 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6751 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6752 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6753 IEM_MC_PREPARE_SSE_USAGE();
6754 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6755 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6756 IEM_MC_ADVANCE_RIP_AND_FINISH();
6757 IEM_MC_END();
6758 }
6759 else
6760 {
6761 /*
6762 * Register, memory.
6763 */
6764 /// @todo Caller already enforced register mode?!
6765 AssertFailedReturn(VINF_SUCCESS);
6766 }
6767}
6768
6769
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6776
6777
/** Opcode 0x66 0x0f 0x71 11/2. - psrlw Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
6784
6785
/** Opcode 0x0f 0x71 11/4. - psraw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6792
6793
/** Opcode 0x66 0x0f 0x71 11/4. - psraw Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
6800
6801
/** Opcode 0x0f 0x71 11/6. - psllw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6808
6809
/** Opcode 0x66 0x0f 0x71 11/6. - psllw Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup12RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6816
6817
6818/**
6819 * Group 12 jump table for register variant.
6820 */
6821IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6822{
6823 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6824 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6825 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6826 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6827 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6828 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6829 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6830 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6831};
6832AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6833
6834
/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 12 are all invalid. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6845
6846
/** Opcode 0x0f 0x72 11/2. - psrld Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6853
6854
/** Opcode 0x66 0x0f 0x72 11/2. - psrld Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
6861
6862
/** Opcode 0x0f 0x72 11/4. - psrad Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6869
6870
/** Opcode 0x66 0x0f 0x72 11/4. - psrad Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
6877
6878
/** Opcode 0x0f 0x72 11/6. - pslld Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6885
/** Opcode 0x66 0x0f 0x72 11/6. - pslld Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup13RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6892
6893
6894/**
6895 * Group 13 jump table for register variant.
6896 */
6897IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6898{
6899 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6900 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6901 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6902 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6903 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6904 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6905 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6906 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6907};
6908AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6909
/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 13 are all invalid. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6920
6921
/** Opcode 0x0f 0x73 11/2. - psrlq Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6928
6929
/** Opcode 0x66 0x0f 0x73 11/2. - psrlq Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
6936
6937
/** Opcode 0x66 0x0f 0x73 11/3. - psrldq Ux, Ib (SSE2 only, no MMX form) */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
6944
6945
/** Opcode 0x0f 0x73 11/6. - psllq Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6952
6953
/** Opcode 0x66 0x0f 0x73 11/6. - psllq Ux, Ib */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
6960
6961
/** Opcode 0x66 0x0f 0x73 11/7. - pslldq Ux, Ib (SSE2 only, no MMX form) */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
    /* Reached only via the register-form dispatch table (g_apfnGroup14RegReg). */
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6968
6969/**
6970 * Group 14 jump table for register variant.
6971 */
6972IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6973{
6974 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6975 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6976 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6977 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6978 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6979 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6980 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6981 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6982};
6983AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6984
6985
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 14 are all invalid. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6996
6997
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    /* Defers decoding and dispatch to the common MMX full,full worker with the 64-bit pcmpeqb helper. */
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
7004
7005
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    /* Defers decoding and dispatch to the common SSE2 full,full worker with the 128-bit pcmpeqb helper. */
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
7012
7013
7014/* Opcode 0xf3 0x0f 0x74 - invalid */
7015/* Opcode 0xf2 0x0f 0x74 - invalid */
7016
7017
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    /* Defers decoding and dispatch to the common MMX full,full worker with the 64-bit pcmpeqw helper. */
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
7024
7025
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    /* Defers decoding and dispatch to the common SSE2 full,full worker with the 128-bit pcmpeqw helper. */
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
7032
7033
7034/* Opcode 0xf3 0x0f 0x75 - invalid */
7035/* Opcode 0xf2 0x0f 0x75 - invalid */
7036
7037
7038/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7039FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7040{
7041    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defers to the common MMX full/full worker, passing the dword-compare assembly helper. */
7042    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7043}
7044
7045
7046/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7047FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7048{
7049    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defers to the common SSE2 full/full worker, passing the 128-bit dword-compare helper. */
7050    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7051}
7052
7053
7054/* Opcode 0xf3 0x0f 0x76 - invalid */
7055/* Opcode 0xf2 0x0f 0x76 - invalid */
7056
7057
7058/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7059FNIEMOP_DEF(iemOp_emms)
7060{
7061    IEMOP_MNEMONIC(emms, "emms");
7062    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7063
7064    IEM_MC_BEGIN(0,0);
    /* Checks first: #NM when the device-not-available conditions apply, then a
       pending x87 exception (#MF) before the state is touched. */
7065    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7066    IEM_MC_MAYBE_RAISE_FPU_XCPT();
7067    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leave MMX mode, i.e. transition the FPU state back from MMX usage. */
7068    IEM_MC_FPU_FROM_MMX_MODE();
7069    IEM_MC_ADVANCE_RIP_AND_FINISH();
7070    IEM_MC_END();
7071}
7072
7073/* Opcode 0x66 0x0f 0x77 - invalid */
7074/* Opcode 0xf3 0x0f 0x77 - invalid */
7075/* Opcode 0xf2 0x0f 0x77 - invalid */
7076
7077/** Opcode 0x0f 0x78 - VMREAD Ey, Gy
7077 * Reads the VMCS field whose encoding is in the Gy (reg) operand into the
7077 * Ey (r/m) destination (register or memory). Only built with nested VMX
7077 * support; otherwise a stub is emitted. */
7078#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7079FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7080{
7081    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7082    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7083    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is fixed: 64-bit in long mode, 32-bit everywhere else. */
7084    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7085
7086    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7087    if (IEM_IS_MODRM_REG_MODE(bRm))
7088    {
7089        /*
7090         * Register, register.
7091         */
7092        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7093        if (enmEffOpSize == IEMMODE_64BIT)
7094        {
7095            IEM_MC_BEGIN(2, 0);
7096            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7097            IEM_MC_ARG(uint64_t, u64Enc, 1);
7098            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7099            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7100            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7101            IEM_MC_END();
7102        }
7103        else
7104        {
7105            IEM_MC_BEGIN(2, 0);
7106            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7107            IEM_MC_ARG(uint32_t, u32Enc, 1);
7108            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7109            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7110            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
    /* 32-bit writes to a GPR clear the upper half; done explicitly via the ref. */
7111            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7112            IEM_MC_END();
7113        }
7114    }
7115    else
7116    {
7117        /*
7118         * Memory, register.
7119         */
7120        if (enmEffOpSize == IEMMODE_64BIT)
7121        {
7122            IEM_MC_BEGIN(3, 0);
7123            IEM_MC_ARG(uint8_t, iEffSeg, 0);
7124            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7125            IEM_MC_ARG(uint64_t, u64Enc, 2);
    /* Note: the effective address must be calculated (consuming SIB/disp bytes)
       before decoding is declared done. */
7126            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7127            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7128            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7129            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7130            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7131            IEM_MC_END();
7132        }
7133        else
7134        {
7135            IEM_MC_BEGIN(3, 0);
7136            IEM_MC_ARG(uint8_t, iEffSeg, 0);
7137            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7138            IEM_MC_ARG(uint32_t, u32Enc, 2);
7139            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7140            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7141            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7142            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7143            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7144            IEM_MC_END();
7145        }
7146    }
    /* NOTE(review): older-style explicit return; most decoders in this file now
       rely on the MC block finishing - presumably kept intentional here. */
7147    return VINF_SUCCESS;
7148}
7149#else
7150FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7151#endif
7152
7153/* Opcode 0x66 0x0f 0x78 - AMD Group 17 (decode stub, not implemented yet). */
7154FNIEMOP_STUB(iemOp_AmdGrp17);
7155/* Opcode 0xf3 0x0f 0x78 - invalid */
7156/* Opcode 0xf2 0x0f 0x78 - invalid */
7157
7158/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey
7158 * Writes the value from the Ey (r/m) operand (register or memory) into the
7158 * VMCS field whose encoding is in the Gy (reg) operand. Only built with
7158 * nested VMX support; otherwise a stub is emitted. */
7159#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7160FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7161{
7162    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7163    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7164    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is fixed: 64-bit in long mode, 32-bit everywhere else. */
7165    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7166
7167    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7168    if (IEM_IS_MODRM_REG_MODE(bRm))
7169    {
7170        /*
7171         * Register, register.
7172         */
7173        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7174        if (enmEffOpSize == IEMMODE_64BIT)
7175        {
7176            IEM_MC_BEGIN(2, 0);
7177            IEM_MC_ARG(uint64_t, u64Val, 0);
7178            IEM_MC_ARG(uint64_t, u64Enc, 1);
7179            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7180            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7181            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
7182            IEM_MC_END();
7183        }
7184        else
7185        {
    /* 32-bit variant shares the same C worker; values are passed zero-extended. */
7186            IEM_MC_BEGIN(2, 0);
7187            IEM_MC_ARG(uint32_t, u32Val, 0);
7188            IEM_MC_ARG(uint32_t, u32Enc, 1);
7189            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7190            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7191            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
7192            IEM_MC_END();
7193        }
7194    }
7195    else
7196    {
7197        /*
7198         * Register, memory.
7199         */
7200        if (enmEffOpSize == IEMMODE_64BIT)
7201        {
7202            IEM_MC_BEGIN(3, 0);
7203            IEM_MC_ARG(uint8_t, iEffSeg, 0);
7204            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7205            IEM_MC_ARG(uint64_t, u64Enc, 2);
    /* Effective address is calculated (consuming SIB/disp bytes) before decoding
       is declared done; the C worker reads the source value from memory itself. */
7206            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7207            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7208            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7209            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7210            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7211            IEM_MC_END();
7212        }
7213        else
7214        {
7215            IEM_MC_BEGIN(3, 0);
7216            IEM_MC_ARG(uint8_t, iEffSeg, 0);
7217            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7218            IEM_MC_ARG(uint32_t, u32Enc, 2);
7219            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7220            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7221            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7222            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7223            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7224            IEM_MC_END();
7225        }
7226    }
    /* NOTE(review): older-style explicit return; most decoders in this file now
       rely on the MC block finishing - presumably kept intentional here. */
7227    return VINF_SUCCESS;
7228}
7229#else
7230FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7231#endif
7232/* Opcode 0x66 0x0f 0x79 - invalid */
7233/* Opcode 0xf3 0x0f 0x79 - invalid */
7234/* Opcode 0xf2 0x0f 0x79 - invalid */
7235
7236/* Opcode 0x0f 0x7a - invalid */
7237/* Opcode 0x66 0x0f 0x7a - invalid */
7238/* Opcode 0xf3 0x0f 0x7a - invalid */
7239/* Opcode 0xf2 0x0f 0x7a - invalid */
7240
7241/* Opcode 0x0f 0x7b - invalid */
7242/* Opcode 0x66 0x0f 0x7b - invalid */
7243/* Opcode 0xf3 0x0f 0x7b - invalid */
7244/* Opcode 0xf2 0x0f 0x7b - invalid */
7245
7246/* Opcode 0x0f 0x7c - invalid */
7247
7248
7249/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7250FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7251{
7252    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Defers to the common SSE3 floating-point full/full worker with the haddpd helper. */
7253    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7254}
7255
7256
7257/* Opcode 0xf3 0x0f 0x7c - invalid */
7258
7259
7260/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7261FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7262{
7263    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Defers to the common SSE3 floating-point full/full worker with the haddps helper. */
7264    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7265}
7266
7267
7268/* Opcode 0x0f 0x7d - invalid */
7269
7270
7271/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7272FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7273{
7274    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Defers to the common SSE3 floating-point full/full worker with the hsubpd helper. */
7275    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7276}
7277
7278
7279/* Opcode 0xf3 0x0f 0x7d - invalid */
7280
7281
7282/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7283FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7284{
7285    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Defers to the common SSE3 floating-point full/full worker with the hsubps helper. */
7286    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7287}
7288
7289
7290/** Opcode 0x0f 0x7e - movd_q Ey, Pd
7290 * MMX register to GPR/memory move: with REX.W it is a 64-bit MOVQ store,
7290 * otherwise a 32-bit MOVD store of the low dword. Both forms enter MMX mode. */
7291FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7292{
7293    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7294    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7295    {
7296        /**
7297         * @opcode      0x7e
7298         * @opcodesub   rex.w=1
7299         * @oppfx       none
7300         * @opcpuid     mmx
7301         * @opgroup     og_mmx_datamove
7302         * @opxcpttype  5
7303         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
7304         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7305         */
7306        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7307        if (IEM_IS_MODRM_REG_MODE(bRm))
7308        {
7309            /* greg64, MMX */
7310            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7311            IEM_MC_BEGIN(0, 1);
7312            IEM_MC_LOCAL(uint64_t, u64Tmp);
7313
7314            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7315            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7316            IEM_MC_FPU_TO_MMX_MODE();
7317
7318            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7319            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7320
7321            IEM_MC_ADVANCE_RIP_AND_FINISH();
7322            IEM_MC_END();
7323        }
7324        else
7325        {
7326            /* [mem64], MMX */
7327            IEM_MC_BEGIN(0, 2);
7328            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7329            IEM_MC_LOCAL(uint64_t, u64Tmp);
7330
    /* Effective address first (consumes the rest of the instruction bytes),
       then exception checks + MMX mode switch before the actual store. */
7331            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7332            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7334            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7335            IEM_MC_FPU_TO_MMX_MODE();
7336
7337            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7338            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7339
7340            IEM_MC_ADVANCE_RIP_AND_FINISH();
7341            IEM_MC_END();
7342        }
7343    }
7344    else
7345    {
7346        /**
7347         * @opdone
7348         * @opcode      0x7e
7349         * @opcodesub   rex.w=0
7350         * @oppfx       none
7351         * @opcpuid     mmx
7352         * @opgroup     og_mmx_datamove
7353         * @opxcpttype  5
7354         * @opfunction  iemOp_movd_q_Pd_Ey
7355         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
7356         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
7357         */
7358        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7359        if (IEM_IS_MODRM_REG_MODE(bRm))
7360        {
7361            /* greg32, MMX */
7362            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7363            IEM_MC_BEGIN(0, 1);
7364            IEM_MC_LOCAL(uint32_t, u32Tmp);
7365
7366            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7367            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7368            IEM_MC_FPU_TO_MMX_MODE();
7369
7370            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7371            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7372
7373            IEM_MC_ADVANCE_RIP_AND_FINISH();
7374            IEM_MC_END();
7375        }
7376        else
7377        {
7378            /* [mem32], MMX */
7379            IEM_MC_BEGIN(0, 2);
7380            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7381            IEM_MC_LOCAL(uint32_t, u32Tmp);
7382
7383            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7384            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7385            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7386            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7387            IEM_MC_FPU_TO_MMX_MODE();
7388
7389            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7390            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7391
7392            IEM_MC_ADVANCE_RIP_AND_FINISH();
7393            IEM_MC_END();
7394        }
7395    }
7396}
7397
7398
/** Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy
 * XMM register to GPR/memory move: with REX.W it is a 64-bit MOVQ store of the
 * low qword, otherwise a 32-bit MOVD store of the low dword (SSE2). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7400{
7401    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7402    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7403    {
7404        /**
7405         * @opcode      0x7e
7406         * @opcodesub   rex.w=1
7407         * @oppfx       0x66
7408         * @opcpuid     sse2
7409         * @opgroup     og_sse2_simdint_datamove
7410         * @opxcpttype  5
7411         * @optest      64-bit / op1=1 op2=2   -> op1=2
7412         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
7413         */
7414        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7415        if (IEM_IS_MODRM_REG_MODE(bRm))
7416        {
7417            /* greg64, XMM */
7418            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7419            IEM_MC_BEGIN(0, 1);
7420            IEM_MC_LOCAL(uint64_t, u64Tmp);
7421
    /* Source XMM register is only read, so the SSE state is actualized for read. */
7422            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7423            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7424
7425            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7426            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7427
7428            IEM_MC_ADVANCE_RIP_AND_FINISH();
7429            IEM_MC_END();
7430        }
7431        else
7432        {
7433            /* [mem64], XMM */
7434            IEM_MC_BEGIN(0, 2);
7435            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7436            IEM_MC_LOCAL(uint64_t, u64Tmp);
7437
7438            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7439            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7440            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7441            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7442
7443            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7444            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7445
7446            IEM_MC_ADVANCE_RIP_AND_FINISH();
7447            IEM_MC_END();
7448        }
7449    }
7450    else
7451    {
7452        /**
7453         * @opdone
7454         * @opcode      0x7e
7455         * @opcodesub   rex.w=0
7456         * @oppfx       0x66
7457         * @opcpuid     sse2
7458         * @opgroup     og_sse2_simdint_datamove
7459         * @opxcpttype  5
7460         * @opfunction  iemOp_movd_q_Vy_Ey
7461         * @optest      op1=1 op2=2   -> op1=2
7462         * @optest      op1=0 op2=-42 -> op1=-42
7463         */
7464        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7465        if (IEM_IS_MODRM_REG_MODE(bRm))
7466        {
7467            /* greg32, XMM */
7468            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7469            IEM_MC_BEGIN(0, 1);
7470            IEM_MC_LOCAL(uint32_t, u32Tmp);
7471
7472            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7473            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7474
7475            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7476            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7477
7478            IEM_MC_ADVANCE_RIP_AND_FINISH();
7479            IEM_MC_END();
7480        }
7481        else
7482        {
7483            /* [mem32], XMM */
7484            IEM_MC_BEGIN(0, 2);
7485            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7486            IEM_MC_LOCAL(uint32_t, u32Tmp);
7487
7488            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7489            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7490            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7491            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7492
7493            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7494            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7495
7496            IEM_MC_ADVANCE_RIP_AND_FINISH();
7497            IEM_MC_END();
7498        }
7499    }
7500}
7501
7502/**
7503 * @opcode      0x7e
7504 * @oppfx       0xf3
7505 * @opcpuid     sse2
7506 * @opgroup     og_sse2_pcksclr_datamove
7507 * @opxcpttype  none
7508 * @optest      op1=1 op2=2   -> op1=2
7509 * @optest      op1=0 op2=-42 -> op1=-42
7510 *
7510 * movq Vq, Wq: loads a qword from an XMM register or memory into the low
7510 * half of the destination XMM register, zeroing the upper half.
7511 */
7511FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7512{
7513    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7514    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7515    if (IEM_IS_MODRM_REG_MODE(bRm))
7516    {
7517        /*
7518         * Register, register.
7519         */
7520        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7521        IEM_MC_BEGIN(0, 2);
7522        IEM_MC_LOCAL(uint64_t, uSrc);
7523
7524        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7525        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7526
    /* The _ZX_U128 store zero-extends the qword into the full 128-bit register. */
7527        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7528        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7529
7530        IEM_MC_ADVANCE_RIP_AND_FINISH();
7531        IEM_MC_END();
7532    }
7533    else
7534    {
7535        /*
7536         * Memory, register.
7537         */
7538        IEM_MC_BEGIN(0, 2);
7539        IEM_MC_LOCAL(uint64_t, uSrc);
7540        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7541
7542        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7543        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7544        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7545        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7546
7547        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7548        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7549
7550        IEM_MC_ADVANCE_RIP_AND_FINISH();
7551        IEM_MC_END();
7552    }
7553}
7554
7555/* Opcode 0xf2 0x0f 0x7e - invalid */
7556
7557
7558/** Opcode 0x0f 0x7f - movq Qq, Pq
7558 * Stores an MMX register to another MMX register or to memory (64-bit). */
7559FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7560{
7561    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7562    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7563    if (IEM_IS_MODRM_REG_MODE(bRm))
7564    {
7565        /*
7566         * Register, register.
7567         */
7568        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7569        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7570        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7571        IEM_MC_BEGIN(0, 1);
7572        IEM_MC_LOCAL(uint64_t, u64Tmp);
7573        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7574        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7575        IEM_MC_FPU_TO_MMX_MODE();
7576
    /* MMX registers use the 3-bit (non-REX) ModR/M indices, see todos above. */
7577        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7578        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7579
7580        IEM_MC_ADVANCE_RIP_AND_FINISH();
7581        IEM_MC_END();
7582    }
7583    else
7584    {
7585        /*
7586         * Memory, Register.
7587         */
7588        IEM_MC_BEGIN(0, 2);
7589        IEM_MC_LOCAL(uint64_t, u64Tmp);
7590        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7591
7592        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7593        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7594        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7595        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7596        IEM_MC_FPU_TO_MMX_MODE();
7597
7598        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7599        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7600
7601        IEM_MC_ADVANCE_RIP_AND_FINISH();
7602        IEM_MC_END();
7603    }
7604}
7605
7606/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
7606 * Stores a full XMM register to another XMM register or to memory; the
7606 * memory form uses the alignment-checking store. */
7607FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7608{
7609    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7610    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7611    if (IEM_IS_MODRM_REG_MODE(bRm))
7612    {
7613        /*
7614         * Register, register.
7615         */
7616        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7617        IEM_MC_BEGIN(0, 0);
    /* Destination XMM register is modified, hence actualize-for-change here. */
7618        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7619        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7620        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7621                              IEM_GET_MODRM_REG(pVCpu, bRm));
7622        IEM_MC_ADVANCE_RIP_AND_FINISH();
7623        IEM_MC_END();
7624    }
7625    else
7626    {
7627        /*
7628         * Register, memory.
7629         */
7630        IEM_MC_BEGIN(0, 2);
7631        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7632        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7633
7634        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7635        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7636        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7637        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7638
    /* Aligned store variant: raises #GP for a misaligned 16-byte access. */
7639        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7640        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7641
7642        IEM_MC_ADVANCE_RIP_AND_FINISH();
7643        IEM_MC_END();
7644    }
7645}
7646
7647/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
7647 * Same as movdqa Wx,Vx except the memory form uses the unaligned store. */
7648FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7649{
7650    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7651    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7652    if (IEM_IS_MODRM_REG_MODE(bRm))
7653    {
7654        /*
7655         * Register, register.
7656         */
7657        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7658        IEM_MC_BEGIN(0, 0);
7659        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7660        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7661        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7662                              IEM_GET_MODRM_REG(pVCpu, bRm));
7663        IEM_MC_ADVANCE_RIP_AND_FINISH();
7664        IEM_MC_END();
7665    }
7666    else
7667    {
7668        /*
7669         * Register, memory.
7670         */
7671        IEM_MC_BEGIN(0, 2);
7672        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7673        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7674
7675        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7676        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7677        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7678        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7679
    /* Unaligned store variant - no 16-byte alignment requirement. */
7680        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7681        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7682
7683        IEM_MC_ADVANCE_RIP_AND_FINISH();
7684        IEM_MC_END();
7685    }
7686}
7687
7688/* Opcode 0xf2 0x0f 0x7f - invalid */
7689
7690
7691
7692/** Opcode 0x0f 0x80 - jo Jv: near jump when OF is set. */
7693FNIEMOP_DEF(iemOp_jo_Jv)
7694{
7695    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7696    IEMOP_HLP_MIN_386();
7697    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
7698    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7699    {
7700        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7701        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702
7703        IEM_MC_BEGIN(0, 0);
7704        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7705            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7706        } IEM_MC_ELSE() {
7707            IEM_MC_ADVANCE_RIP_AND_FINISH();
7708        } IEM_MC_ENDIF();
7709        IEM_MC_END();
7710    }
7711    else
7712    {
7713        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7714        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7715
7716        IEM_MC_BEGIN(0, 0);
7717        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7718            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7719        } IEM_MC_ELSE() {
7720            IEM_MC_ADVANCE_RIP_AND_FINISH();
7721        } IEM_MC_ENDIF();
7722        IEM_MC_END();
7723    }
7724}
7725
7726
7727/** Opcode 0x0f 0x81 - jno Jv: near jump when OF is clear. */
7728FNIEMOP_DEF(iemOp_jno_Jv)
7729{
7730    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7731    IEMOP_HLP_MIN_386();
7732    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through on OF set, jump on OF clear. */
7733    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7734    {
7735        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7736        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737
7738        IEM_MC_BEGIN(0, 0);
7739        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7740            IEM_MC_ADVANCE_RIP_AND_FINISH();
7741        } IEM_MC_ELSE() {
7742            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7743        } IEM_MC_ENDIF();
7744        IEM_MC_END();
7745    }
7746    else
7747    {
7748        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7749        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750
7751        IEM_MC_BEGIN(0, 0);
7752        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7753            IEM_MC_ADVANCE_RIP_AND_FINISH();
7754        } IEM_MC_ELSE() {
7755            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7756        } IEM_MC_ENDIF();
7757        IEM_MC_END();
7758    }
7759}
7760
7761
7762/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: near jump when CF is set. */
7763FNIEMOP_DEF(iemOp_jc_Jv)
7764{
7765    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7766    IEMOP_HLP_MIN_386();
7767    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
7768    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7769    {
7770        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7771        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7772
7773        IEM_MC_BEGIN(0, 0);
7774        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7775            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7776        } IEM_MC_ELSE() {
7777            IEM_MC_ADVANCE_RIP_AND_FINISH();
7778        } IEM_MC_ENDIF();
7779        IEM_MC_END();
7780    }
7781    else
7782    {
7783        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7784        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7785
7786        IEM_MC_BEGIN(0, 0);
7787        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7788            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7789        } IEM_MC_ELSE() {
7790            IEM_MC_ADVANCE_RIP_AND_FINISH();
7791        } IEM_MC_ENDIF();
7792        IEM_MC_END();
7793    }
7794}
7795
7796
7797/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: near jump when CF is clear. */
7798FNIEMOP_DEF(iemOp_jnc_Jv)
7799{
7800    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7801    IEMOP_HLP_MIN_386();
7802    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through on CF set, jump on CF clear. */
7803    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7804    {
7805        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7806        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7807
7808        IEM_MC_BEGIN(0, 0);
7809        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7810            IEM_MC_ADVANCE_RIP_AND_FINISH();
7811        } IEM_MC_ELSE() {
7812            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7813        } IEM_MC_ENDIF();
7814        IEM_MC_END();
7815    }
7816    else
7817    {
7818        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7819        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7820
7821        IEM_MC_BEGIN(0, 0);
7822        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7823            IEM_MC_ADVANCE_RIP_AND_FINISH();
7824        } IEM_MC_ELSE() {
7825            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7826        } IEM_MC_ENDIF();
7827        IEM_MC_END();
7828    }
7829}
7830
7831
7832/** Opcode 0x0f 0x84 - je/jz Jv: near jump when ZF is set. */
7833FNIEMOP_DEF(iemOp_je_Jv)
7834{
7835    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7836    IEMOP_HLP_MIN_386();
7837    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
7838    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7839    {
7840        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7841        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842
7843        IEM_MC_BEGIN(0, 0);
7844        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7845            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7846        } IEM_MC_ELSE() {
7847            IEM_MC_ADVANCE_RIP_AND_FINISH();
7848        } IEM_MC_ENDIF();
7849        IEM_MC_END();
7850    }
7851    else
7852    {
7853        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7854        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7855
7856        IEM_MC_BEGIN(0, 0);
7857        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7858            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7859        } IEM_MC_ELSE() {
7860            IEM_MC_ADVANCE_RIP_AND_FINISH();
7861        } IEM_MC_ENDIF();
7862        IEM_MC_END();
7863    }
7864}
7865
7866
7867/** Opcode 0x0f 0x85 - jne/jnz Jv: near jump when ZF is clear. */
7868FNIEMOP_DEF(iemOp_jne_Jv)
7869{
7870    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7871    IEMOP_HLP_MIN_386();
7872    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through on ZF set, jump on ZF clear. */
7873    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7874    {
7875        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7876        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7877
7878        IEM_MC_BEGIN(0, 0);
7879        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7880            IEM_MC_ADVANCE_RIP_AND_FINISH();
7881        } IEM_MC_ELSE() {
7882            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7883        } IEM_MC_ENDIF();
7884        IEM_MC_END();
7885    }
7886    else
7887    {
7888        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7889        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7890
7891        IEM_MC_BEGIN(0, 0);
7892        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7893            IEM_MC_ADVANCE_RIP_AND_FINISH();
7894        } IEM_MC_ELSE() {
7895            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7896        } IEM_MC_ENDIF();
7897        IEM_MC_END();
7898    }
7899}
7900
7901
7902/** Opcode 0x0f 0x86 - jbe/jna Jv: near jump when CF or ZF is set. */
7903FNIEMOP_DEF(iemOp_jbe_Jv)
7904{
7905    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7906    IEMOP_HLP_MIN_386();
7907    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
7908    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7909    {
7910        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7911        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912
7913        IEM_MC_BEGIN(0, 0);
7914        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7915            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7916        } IEM_MC_ELSE() {
7917            IEM_MC_ADVANCE_RIP_AND_FINISH();
7918        } IEM_MC_ENDIF();
7919        IEM_MC_END();
7920    }
7921    else
7922    {
7923        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7924        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7925
7926        IEM_MC_BEGIN(0, 0);
7927        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7928            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7929        } IEM_MC_ELSE() {
7930            IEM_MC_ADVANCE_RIP_AND_FINISH();
7931        } IEM_MC_ENDIF();
7932        IEM_MC_END();
7933    }
7934}
7935
7936
7937/** Opcode 0x0f 0x87 - jnbe/ja Jv: near jump when both CF and ZF are clear. */
7938FNIEMOP_DEF(iemOp_jnbe_Jv)
7939{
7940    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7941    IEMOP_HLP_MIN_386();
7942    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through when CF or ZF is set. */
7943    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7944    {
7945        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7946        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7947
7948        IEM_MC_BEGIN(0, 0);
7949        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7950            IEM_MC_ADVANCE_RIP_AND_FINISH();
7951        } IEM_MC_ELSE() {
7952            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7953        } IEM_MC_ENDIF();
7954        IEM_MC_END();
7955    }
7956    else
7957    {
7958        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7959        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7960
7961        IEM_MC_BEGIN(0, 0);
7962        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7963            IEM_MC_ADVANCE_RIP_AND_FINISH();
7964        } IEM_MC_ELSE() {
7965            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7966        } IEM_MC_ENDIF();
7967        IEM_MC_END();
7968    }
7969}
7970
7971
7972/** Opcode 0x0f 0x88 - js Jv: near jump when SF is set. */
7973FNIEMOP_DEF(iemOp_js_Jv)
7974{
7975    IEMOP_MNEMONIC(js_Jv, "js Jv");
7976    IEMOP_HLP_MIN_386();
7977    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
7978    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7979    {
7980        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7981        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7982
7983        IEM_MC_BEGIN(0, 0);
7984        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7985            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7986        } IEM_MC_ELSE() {
7987            IEM_MC_ADVANCE_RIP_AND_FINISH();
7988        } IEM_MC_ENDIF();
7989        IEM_MC_END();
7990    }
7991    else
7992    {
7993        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7994        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7995
7996        IEM_MC_BEGIN(0, 0);
7997        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7998            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7999        } IEM_MC_ELSE() {
8000            IEM_MC_ADVANCE_RIP_AND_FINISH();
8001        } IEM_MC_ENDIF();
8002        IEM_MC_END();
8003    }
8004}
8005
8006
8007/** Opcode 0x0f 0x89 - jns Jv: near jump when SF is clear. */
8008FNIEMOP_DEF(iemOp_jns_Jv)
8009{
8010    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8011    IEMOP_HLP_MIN_386();
8012    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through on SF set, jump on SF clear. */
8013    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8014    {
8015        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8016        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8017
8018        IEM_MC_BEGIN(0, 0);
8019        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8020            IEM_MC_ADVANCE_RIP_AND_FINISH();
8021        } IEM_MC_ELSE() {
8022            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8023        } IEM_MC_ENDIF();
8024        IEM_MC_END();
8025    }
8026    else
8027    {
8028        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8029        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8030
8031        IEM_MC_BEGIN(0, 0);
8032        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8033            IEM_MC_ADVANCE_RIP_AND_FINISH();
8034        } IEM_MC_ELSE() {
8035            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8036        } IEM_MC_ENDIF();
8037        IEM_MC_END();
8038    }
8039}
8040
8041
8042/** Opcode 0x0f 0x8a - jp Jv: near jump when PF is set. */
8043FNIEMOP_DEF(iemOp_jp_Jv)
8044{
8045    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8046    IEMOP_HLP_MIN_386();
8047    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
8048    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8049    {
8050        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8051        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8052
8053        IEM_MC_BEGIN(0, 0);
8054        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8055            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8056        } IEM_MC_ELSE() {
8057            IEM_MC_ADVANCE_RIP_AND_FINISH();
8058        } IEM_MC_ENDIF();
8059        IEM_MC_END();
8060    }
8061    else
8062    {
8063        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8064        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8065
8066        IEM_MC_BEGIN(0, 0);
8067        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8068            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8069        } IEM_MC_ELSE() {
8070            IEM_MC_ADVANCE_RIP_AND_FINISH();
8071        } IEM_MC_ENDIF();
8072        IEM_MC_END();
8073    }
8074}
8075
8076
8077/** Opcode 0x0f 0x8b - jnp Jv: near jump when PF is clear. */
8078FNIEMOP_DEF(iemOp_jnp_Jv)
8079{
8080    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8081    IEMOP_HLP_MIN_386();
8082    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32.
       Inverted condition: fall through on PF set, jump on PF clear. */
8083    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8084    {
8085        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8086        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8087
8088        IEM_MC_BEGIN(0, 0);
8089        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8090            IEM_MC_ADVANCE_RIP_AND_FINISH();
8091        } IEM_MC_ELSE() {
8092            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8093        } IEM_MC_ENDIF();
8094        IEM_MC_END();
8095    }
8096    else
8097    {
8098        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8099        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8100
8101        IEM_MC_BEGIN(0, 0);
8102        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8103            IEM_MC_ADVANCE_RIP_AND_FINISH();
8104        } IEM_MC_ELSE() {
8105            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8106        } IEM_MC_ENDIF();
8107        IEM_MC_END();
8108    }
8109}
8110
8111
8112/** Opcode 0x0f 0x8c - jl/jnge Jv: near jump when SF != OF (signed less). */
8113FNIEMOP_DEF(iemOp_jl_Jv)
8114{
8115    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8116    IEMOP_HLP_MIN_386();
8117    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Jv: rel16 displacement with 16-bit operand size, otherwise rel32. */
8118    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8119    {
8120        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8121        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122
8123        IEM_MC_BEGIN(0, 0);
8124        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8125            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8126        } IEM_MC_ELSE() {
8127            IEM_MC_ADVANCE_RIP_AND_FINISH();
8128        } IEM_MC_ENDIF();
8129        IEM_MC_END();
8130    }
8131    else
8132    {
8133        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8134        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8135
8136        IEM_MC_BEGIN(0, 0);
8137        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8138            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8139        } IEM_MC_ELSE() {
8140            IEM_MC_ADVANCE_RIP_AND_FINISH();
8141        } IEM_MC_ENDIF();
8142        IEM_MC_END();
8143    }
8144}
8145
8146
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if not less (SF == OF, signed compare). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted vs. jl: SF != OF means fall through, SF == OF means jump. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit default) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8180
8181
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF set, or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* 'Less or equal' condition: ZF == 1 or SF != OF. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit default) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8215
8216
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (ZF clear and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted vs. jle: the jump is taken on the ELSE path (ZF clear and SF == OF). */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit default) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8250
8251
/** Opcode 0x0f 0x90 - seto Eb: set the byte operand to 1 if OF=1, else to 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - the byte register is written unconditionally (1 or 0). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is computed before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8291
8292
/** Opcode 0x0f 0x91 - setno Eb: set the byte operand to 1 if OF=0, else to 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to seto (0 when OF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8332
8333
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set the byte operand to 1 if CF=1, else to 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - the byte register is written unconditionally (1 or 0). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8373
8374
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set the byte operand to 1 if CF=0, else to 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to setc (0 when CF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8414
8415
/** Opcode 0x0f 0x94 - sete/setz Eb: set the byte operand to 1 if ZF=1, else to 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - the byte register is written unconditionally (1 or 0). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8455
8456
/** Opcode 0x0f 0x95 - setne/setnz Eb: set the byte operand to 1 if ZF=0, else to 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to sete (0 when ZF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8496
8497
/** Opcode 0x0f 0x96 - setbe/setna Eb: set the byte operand to 1 if CF=1 or ZF=1, else to 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - 'below or equal': either CF or ZF set. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8537
8538
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set the byte operand to 1 if CF=0 and ZF=0, else to 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to setbe (0 when CF or ZF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8578
8579
/** Opcode 0x0f 0x98 - sets Eb: set the byte operand to 1 if SF=1, else to 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - the byte register is written unconditionally (1 or 0). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8619
8620
/** Opcode 0x0f 0x99 - setns Eb: set the byte operand to 1 if SF=0, else to 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to sets (0 when SF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8660
8661
/** Opcode 0x0f 0x9a - setp/setpe Eb: set the byte operand to 1 if PF=1, else to 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - the byte register is written unconditionally (1 or 0). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8701
8702
/** Opcode 0x0f 0x9b - setnp/setpo Eb: set the byte operand to 1 if PF=0, else to 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to setp (0 when PF set). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8742
8743
/** Opcode 0x0f 0x9c - setl/setnge Eb: set the byte operand to 1 if SF != OF (signed less), else to 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - 'less': SF differs from OF. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8783
8784
/** Opcode 0x0f 0x9d - setnl/setge Eb: set the byte operand to 1 if SF == OF (signed not-less), else to 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to setl (0 when SF != OF). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8824
8825
/** Opcode 0x0f 0x9e - setle/setng Eb: set the byte operand to 1 if ZF=1 or SF != OF, else to 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - 'less or equal': ZF set, or SF differs from OF. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8865
8866
/** Opcode 0x0f 0x9f - setnle/setg Eb: set the byte operand to 1 if ZF=0 and SF == OF, else to 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - stores are inverted relative to setle (1 only on the ELSE path). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8906
8907
/**
 * Common 'push segment-register' helper.
 *
 * Fetches the segment selector of @a iReg and pushes it with the current
 * effective operand size (16/32/64-bit, zero-extended beyond 16 bits).
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 *
 * NOTE(review): IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() is also invoked by
 *               the callers (e.g. iemOp_push_fs) before dispatching here, so
 *               the call below looks redundant - presumably harmless, but
 *               worth confirming/cleaning up.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* In 64-bit mode only FS/GS can be pushed via 0x0f 0xa0/0xa8. */
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* 32-bit pushes of segment registers use a dedicated MC (sreg push quirk). */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8949
8950
/** Opcode 0x0f 0xa0 - push fs. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    /* NOTE(review): iemOpCommonPushSReg calls this helper again - redundant
       but presumably idempotent; confirm before removing either call. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8959
8960
/** Opcode 0x0f 0xa1 - pop fs.  Deferred to the C implementation (segment loads
 *  involve descriptor-table access and exception checks). */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8969
8970
/** Opcode 0x0f 0xa2 - cpuid.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
8979
8980
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Register form: the bit offset in Gv is taken modulo the operand width.
 * Memory form: the full (signed) bit offset in Gv selects which aligned
 * operand-sized element relative to the effective address is accessed, per
 * the BT-family memory semantics.
 *
 * Whether the instruction may write the destination (and thus honour a LOCK
 * prefix) is derived from pImpl->pfnLockedU16 being non-NULL; BT passes NULL.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* OF/SF/ZF/AF/PF are undefined after the BT family; only CF is defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);      /* bit offset wraps modulo 16 for register operands */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);     /* bit offset wraps modulo 32 for register operands */
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* NOTE(review): this clears the upper half of the 64-bit register even
                   for BT, which does not write its destination on real hardware -
                   confirm this matches the intended 32-bit-write convention here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);     /* bit offset wraps modulo 64 for register operands */
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; BTS/BTR/BTC read-modify-write (and support LOCK). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int16_t,               i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();                  /* LOCK allowed for the writing forms */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK on BT => #UD */
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Split the signed bit offset: keep the low 4 bits as the in-word bit
                   number, arithmetic-shift the rest into a signed word index and scale
                   it to bytes (x2) to adjust the effective address. */
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,               i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Low 5 bits = in-dword bit number; rest = signed dword index, scaled x4. */
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int64_t,               i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Low 6 bits = in-qword bit number; rest = signed qword index, scaled x8. */
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9164
9165
/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();                /* BT first appeared on the 80386. */
    /* Defer to the common bit-test worker with the BT implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
9173
9174
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes and emulates the double-precision shifts with an immediate byte
 * shift count (SHLD/SHRD Ev,Gv,Ib) for 16, 32 and 64-bit operand sizes,
 * covering both the register and the memory destination encodings.
 *
 * @param   pImpl   Size-indexed assembly worker table for either SHLD or SHRD.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* AF and OF are documented as undefined for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the shift count immediate follows the ModR/M byte. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: effective address first, then the immediate byte. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The trailing 1 accounts for the immediate byte still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9319
9320
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib, except the shift count is taken from the
 * CL register instead of an immediate byte.
 *
 * @param   pImpl   Size-indexed assembly worker table for either SHLD or SHRD.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* AF and OF are documented as undefined for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination; no immediate to fetch here (CL supplies the count). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint8_t,         cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9464
9465
9466
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();                /* SHLD first appeared on the 80386. */
    /* Common immediate-count worker with the SHLD implementation selected
       according to the target CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9474
9475
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();                /* SHLD first appeared on the 80386. */
    /* Common CL-count worker with the SHLD implementation selected according
       to the target CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9483
9484
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();                /* GS exists only on the 80386 and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Shared segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9493
9494
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();                /* GS exists only on the 80386 and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loads can fault/reschedule, so this is deferred to a C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9503
9504
/** Opcode 0x0f 0xaa. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Resuming from SMM is far too complex for microcode; defer to C. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}
9513
9514
9515
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();                /* BTS first appeared on the 80386. */
    /* Defer to the common bit-test worker with the BTS implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
9523
9524
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();                /* SHRD first appeared on the 80386. */
    /* Common immediate-count worker with the SHRD implementation selected
       according to the target CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9532
9533
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();                /* SHRD first appeared on the 80386. */
    /* Common CL-count worker with the SHRD implementation selected according
       to the target CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9541
9542
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    /* #UD when FXSAVE/FXRSTOR support is not exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXSAVE only reads the FPU/SSE state, so actualize for read. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9562
9563
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    /* #UD when FXSAVE/FXRSTOR support is not exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXRSTOR loads (modifies) the FPU/SSE state, so actualize for change. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9583
9584
9585/**
9586 * @opmaps grp15
9587 * @opcode !11/2
9588 * @oppfx none
9589 * @opcpuid sse
9590 * @opgroup og_sse_mxcsrsm
9591 * @opxcpttype 5
9592 * @optest op1=0 -> mxcsr=0
9593 * @optest op1=0x2083 -> mxcsr=0x2083
9594 * @optest op1=0xfffffffe -> value.xcpt=0xd
9595 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9596 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9597 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9598 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9599 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9600 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9601 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9602 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9603 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD when SSE support is not exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): LDMXCSR writes MXCSR, yet the state is only actualized
       for read here (like stmxcsr); presumably iemCImpl_ldmxcsr handles the
       dirtying itself -- confirm against the CIMPL implementation. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9621
9622
9623/**
9624 * @opmaps grp15
9625 * @opcode !11/3
9626 * @oppfx none
9627 * @opcpuid sse
9628 * @opgroup og_sse_mxcsrsm
9629 * @opxcpttype 5
9630 * @optest mxcsr=0 -> op1=0
9631 * @optest mxcsr=0x2083 -> op1=0x2083
9632 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9633 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9634 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9635 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9636 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9637 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9638 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9639 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9640 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD when SSE support is not exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STMXCSR only reads MXCSR, so a read actualization suffices. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9658
9659
9660/**
9661 * @opmaps grp15
9662 * @opcode !11/4
9663 * @oppfx none
9664 * @opcpuid xsave
9665 * @opgroup og_system
9666 * @opxcpttype none
9667 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    /* #UD when XSAVE/XRSTOR support is not exposed to the guest. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XSAVE only reads the extended state, so actualize for read. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9686
9687
9688/**
9689 * @opmaps grp15
9690 * @opcode !11/5
9691 * @oppfx none
9692 * @opcpuid xsave
9693 * @opgroup og_system
9694 * @opxcpttype none
9695 */
9696FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9697{
9698 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9699 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9700 return IEMOP_RAISE_INVALID_OPCODE();
9701
9702 IEM_MC_BEGIN(3, 0);
9703 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9704 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9705 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9708 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9709 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9710 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9711 IEM_MC_END();
9712 return VINF_SUCCESS;
9713}
9714
9715/** Opcode 0x0f 0xae mem/6. */
9716FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9717
9718/**
9719 * @opmaps grp15
9720 * @opcode !11/7
9721 * @oppfx none
9722 * @opcpuid clfsh
9723 * @opgroup og_cachectl
9724 * @optest op1=1 ->
9725 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSH support this decodes as an invalid-opcode variant. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C implementation for both CLFLUSH and CLFLUSHOPT. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9742
9743/**
9744 * @opmaps grp15
9745 * @opcode !11/7
9746 * @oppfx 0x66
9747 * @opcpuid clflushopt
9748 * @opgroup og_cachectl
9749 * @optest op1=1 ->
9750 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSHOPT support this decodes as an invalid-opcode variant. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C implementation for both CLFLUSH and CLFLUSHOPT. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9767
9768
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* LFENCE requires SSE2 on the guest CPU profile. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* On ARM64 hosts the native worker is always used; on x86 hosts lacking
       SSE2 an alternative memory fence stands in for the real instruction. */
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9790
9791
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 on the guest CPU profile. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* On ARM64 hosts the native worker is always used; on x86 hosts lacking
       SSE2 an alternative memory fence stands in for the real instruction. */
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9813
9814
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): SFENCE is an SSE (not SSE2) instruction; gating it on
       fSse2 like lfence/mfence may be stricter than real hardware -- verify. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* On ARM64 hosts the native worker is always used; on x86 hosts lacking
       SSE2 an alternative memory fence stands in for the real instruction. */
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9836
9837
/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: read the full FS base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();     /* #UD unless FSGSBASE is usable. */
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: read the low 32 bits of the FS base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9864
9865
/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: read the full GS base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();     /* #UD unless FSGSBASE is usable. */
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: read the low 32 bits of the GS base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9892
9893
/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: write the full FS base; non-canonical -> #GP(0). */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();     /* #UD unless FSGSBASE is usable. */
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: the 32-bit value is zero-extended into the
           64-bit base (always canonical, so no #GP check needed). */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9921
9922
/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: write the full GS base; non-canonical -> #GP(0). */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();     /* #UD unless FSGSBASE is usable. */
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: the 32-bit value is zero-extended into the
           64-bit base (always canonical, so no #GP check needed). */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9950
9951
/**
 * Group 15 jump table for register variant.
 *
 * Indexed by the ModR/M reg field times four plus the mandatory-prefix
 * column (none, 066h, 0f3h, 0f2h) -- see iemOp_Grp15.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,               066h,                0f3h,                 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
/* 8 reg-field values times 4 prefix columns. */
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9967
9968
/**
 * Group 15 jump table for memory variant.
 *
 * Indexed by the ModR/M reg field times four plus the mandatory-prefix
 * column (none, 066h, 0f3h, 0f2h) -- see iemOp_Grp15.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                 066h,                   0f3h,                0f2h */
    /* /0 */ iemOp_Grp15_fxsave,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,  iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
/* 8 reg-field values times 4 prefix columns. */
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9984
9985
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on ModR/M mode (register vs memory form), then on the reg
       field and mandatory prefix: table index = reg * 4 + prefix column. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
9999
10000
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();                /* Two-operand IMUL first appeared on the 80386. */
    /* SF, ZF, AF and PF are documented as undefined for IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Generic reg,reg/mem binary-operator worker with the two-operand IMUL
       implementation selected according to the target CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}
10009
10010
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();                /* CMPXCHG first appeared on the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: compare AL with r/m8, exchange with r8 on match. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,     0);
        IEM_MC_ARG(uint8_t *,       pu8Al,      1);
        IEM_MC_ARG(uint8_t,         u8Src,      2);
        IEM_MC_ARG(uint32_t *,      pEFlags,    3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: work on a local AL copy so the helper can update
           it, then commit memory, EFLAGS and AL after the compare-exchange. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG(uint8_t *,       pu8Al,              1);
        IEM_MC_ARG(uint8_t,         u8Src,              2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Write back the (possibly helper-updated) AL value. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10068
/**
 * @opcode 0x0f 0xb1
 *
 * CMPXCHG Ev,Gv: compares the accumulator (AX/EAX/RAX by operand size) with
 * Ev.  On match ZF is set and Gv is written to Ev; otherwise ZF is cleared
 * and Ev is loaded into the accumulator.  A LOCK prefix selects the locked
 * worker (meaningful for the memory form only).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register destination: destination and accumulator are passed as
         * register references, so the worker updates them in place and no
         * explicit write-back is needed afterwards.
         */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* The worker wrote through a plain uint32_t reference, so clear
                   the high dword of whichever 64-bit register was actually
                   modified: the destination on match (ZF=1), the accumulator
                   otherwise. */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: map it read/write, keep a local copy of the
         * accumulator for the worker to update, and write that copy back to
         * xAX after committing the destination and the flags.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* Unconditional write-back is harmless: the worker leaves the
                   local accumulator copy unchanged on a successful compare. */
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);

                /* Unlike the 16/64-bit forms, only write the accumulator back
                   when the compare failed (ZF clear).  NOTE(review): presumably
                   this is to control when the high RAX dword gets touched by a
                   32-bit register store - confirm against SDM/real hardware. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* Unconditional write-back: unchanged on a successful compare. */
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10270
10271
/**
 * Common worker for LSS/LFS/LGS (0x0f 0xb2/0xb4/0xb5).
 *
 * Reads a far pointer (offset followed by a 16-bit selector) from memory and
 * hands the pieces to iemCImpl_load_SReg_Greg, which loads the segment
 * register and the general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The offset is at the start of the operand, the selector follows it. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10333
10334
10335/** Opcode 0x0f 0xb2. */
10336FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10337{
10338 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10339 IEMOP_HLP_MIN_386();
10340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10341 if (IEM_IS_MODRM_REG_MODE(bRm))
10342 return IEMOP_RAISE_INVALID_OPCODE();
10343 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10344}
10345
10346
/** Opcode 0x0f 0xb3 - btr Ev,Gv: bit test and reset, handled by the common
 *  bit-op worker using the BTR implementation table. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
10354
10355
10356/** Opcode 0x0f 0xb4. */
10357FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10358{
10359 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10360 IEMOP_HLP_MIN_386();
10361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10362 if (IEM_IS_MODRM_REG_MODE(bRm))
10363 return IEMOP_RAISE_INVALID_OPCODE();
10364 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10365}
10366
10367
10368/** Opcode 0x0f 0xb5. */
10369FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10370{
10371 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10372 IEMOP_HLP_MIN_386();
10373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10374 if (IEM_IS_MODRM_REG_MODE(bRm))
10375 return IEMOP_RAISE_INVALID_OPCODE();
10376 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10377}
10378
10379
/**
 * @opcode 0x0f 0xb6
 * movzx Gv,Eb: zero-extend a byte register or memory operand into a 16, 32
 * or 64-bit general register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10473
10474
/**
 * @opcode 0x0f 0xb7
 * movzx Gv,Ew: zero-extend a word register or memory operand into a 32 or
 * 64-bit general register (only the 64-bit case is distinguished; everything
 * else uses the 32-bit path).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10542
10543
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); decodes as \#UD
 *  here via the UD stub. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10546
10547
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev
 *  Raises \#UD (via the invalid-with-RM path) when the guest CPU does not
 *  advertise POPCNT; otherwise dispatches to the generic rv,rm binary-op
 *  helper with either the native assembly workers or the C fallbacks,
 *  depending on host capability. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
    /* Function tables are not used by the MC checker build. */
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
10564
10565
/**
 * @opcode    0xb9
 * @opinvalid intel-modrm
 * @optest    ->
 *
 * UD1 - guaranteed invalid opcode; always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
10581
10582
/**
 * @opcode 0x0f 0xba
 *
 * Group 8: BT/BTS/BTR/BTC Ev,Ib selected by the ModR/M reg field (4..7);
 * reg 0..3 are invalid but still consume the full ModR/M + imm8 encoding.
 * The imm8 bit offset is masked to the operand width.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked worker and only reads the destination; the
           writing variants (BTS/BTR/BTC) need a read/write mapping. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still follows. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still follows. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still follows. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10754
10755
/** Opcode 0x0f 0xbb - btc Ev,Gv: bit test and complement, handled by the
 *  common bit-op worker using the BTC implementation table. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
10763
10764
/**
 * Common worker for BSF and BSR instructions.
 *
 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
 * the destination register, which means that for 32-bit operations the high
 * bits must be left alone.
 *
 * The worker sets ZF and leaves the destination untouched when the source is
 * zero, which is why the 32-bit paths only clear the destination's high
 * dword when ZF is clear (i.e. when a result was actually written).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Only clear the high dword when the destination was written. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Only clear the high dword when the destination was written. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10904
10905
/** Opcode 0x0f 0xbc - bsf Gv,Ev: bit scan forward; OF/SF/AF/PF/CF are
 *  declared undefined for verification, and the worker table is selected by
 *  the target-CPU EFLAGS behavior. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
10914
10915
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev
 *  Decodes as BSF when the guest CPU lacks BMI1 (F3 is then just a harmless
 *  prefix); otherwise dispatches to the generic rv,rm binary-op helper with
 *  a worker table chosen by host BMI1 support and the target-CPU EFLAGS
 *  behavior. */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Function tables are not used by the MC checker build. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
10940
10941
/** Opcode 0x0f 0xbd - bsr Gv,Ev: bit scan reverse; OF/SF/AF/PF/CF are
 *  declared undefined for verification, and the worker table is selected by
 *  the target-CPU EFLAGS behavior. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
10950
10951
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev
 *  Decodes as BSR when the guest CPU lacks BMI1; otherwise dispatches to the
 *  generic rv,rm binary-op helper with a worker table chosen by host BMI1
 *  support and the target-CPU EFLAGS behavior.
 *  NOTE(review): LZCNT is normally advertised via the CPUID ABM bit rather
 *  than BMI1 - confirm the fBmi1 gate here is intentional. */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Function tables are not used by the MC checker build. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
10976
10977
10978
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb - sign-extend the byte operand Eb into the 16/32/64-bit
       destination register Gv. */
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();   /* MOVSX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Note: the effective address is calculated before the final decoding
           step so displacement bytes are consumed first. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11072
11073
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew - sign-extend the word operand Ew into the 32/64-bit
       destination register Gv.  With a 16-bit operand size this is effectively
       a plain word move, which is why only two size paths exist below. */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();   /* MOVSX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11141
11142
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb - exchange-and-add bytes: Gb receives the old value of Eb,
       Eb receives the sum; arithmetic flags are set as for ADD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();   /* XADD was introduced with the 80486. */
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy (u8RegCopy) and
           written back only after the memory commit succeeds. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Honour the LOCK prefix by dispatching to the interlocked worker. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11199
11200
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv - exchange-and-add for 16/32/64-bit operands: Gv receives the
       old value of Ev, Ev receives the sum; flags are set as for ADD. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();   /* XADD was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes clear the upper halves of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As in the byte variant: the register operand goes through a local
           copy and is stored back only after the memory commit; the LOCK
           prefix selects the interlocked worker. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11353
11354
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    /* CMPPS (SSE): packed single-precision compare of Vps against Wps/mem128
       using the predicate in Ib; each lane becomes an all-ones/all-zeroes
       mask.  The result is only written back if no MXCSR exception is
       pending after the worker runs. */
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        /* uSrc1 is the destination register's current content, uSrc2 the
           second operand. */
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M displacement bytes. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11424
11425
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    /* CMPPD (SSE2): packed double-precision compare of Vpd against Wpd/mem128
       using the predicate in Ib; each lane becomes an all-ones/all-zeroes
       mask.  The result is written back only when no MXCSR exception is
       pending after the worker runs. */
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        /* uSrc1 is the destination register's current content, uSrc2 the
           second operand. */
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M displacement bytes. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11495
11496
11497/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11498FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11499{
11500 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11501
11502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11503 if (IEM_IS_MODRM_REG_MODE(bRm))
11504 {
11505 /*
11506 * Register, register.
11507 */
11508 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11510 IEM_MC_BEGIN(4, 2);
11511 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11512 IEM_MC_LOCAL(X86XMMREG, Dst);
11513 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11514 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11515 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11516 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11517 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11518 IEM_MC_PREPARE_SSE_USAGE();
11519 IEM_MC_REF_MXCSR(pfMxcsr);
11520 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11521 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11522 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11523 IEM_MC_IF_MXCSR_XCPT_PENDING()
11524 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11525 IEM_MC_ELSE()
11526 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11527 IEM_MC_ENDIF();
11528
11529 IEM_MC_ADVANCE_RIP_AND_FINISH();
11530 IEM_MC_END();
11531 }
11532 else
11533 {
11534 /*
11535 * Register, memory.
11536 */
11537 IEM_MC_BEGIN(4, 3);
11538 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11539 IEM_MC_LOCAL(X86XMMREG, Dst);
11540 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11541 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11542 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11544
11545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11546 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11547 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11549 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11550 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11551
11552 IEM_MC_PREPARE_SSE_USAGE();
11553 IEM_MC_REF_MXCSR(pfMxcsr);
11554 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11555 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11556 IEM_MC_IF_MXCSR_XCPT_PENDING()
11557 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11558 IEM_MC_ELSE()
11559 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11560 IEM_MC_ENDIF();
11561
11562 IEM_MC_ADVANCE_RIP_AND_FINISH();
11563 IEM_MC_END();
11564 }
11565}
11566
11567
11568/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11569FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11570{
11571 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11572
11573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11574 if (IEM_IS_MODRM_REG_MODE(bRm))
11575 {
11576 /*
11577 * Register, register.
11578 */
11579 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11581 IEM_MC_BEGIN(4, 2);
11582 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11583 IEM_MC_LOCAL(X86XMMREG, Dst);
11584 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11586 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11587 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11589 IEM_MC_PREPARE_SSE_USAGE();
11590 IEM_MC_REF_MXCSR(pfMxcsr);
11591 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11592 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11593 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11594 IEM_MC_IF_MXCSR_XCPT_PENDING()
11595 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11596 IEM_MC_ELSE()
11597 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11598 IEM_MC_ENDIF();
11599
11600 IEM_MC_ADVANCE_RIP_AND_FINISH();
11601 IEM_MC_END();
11602 }
11603 else
11604 {
11605 /*
11606 * Register, memory.
11607 */
11608 IEM_MC_BEGIN(4, 3);
11609 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11610 IEM_MC_LOCAL(X86XMMREG, Dst);
11611 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11612 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11613 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11615
11616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11618 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11620 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11621 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11622
11623 IEM_MC_PREPARE_SSE_USAGE();
11624 IEM_MC_REF_MXCSR(pfMxcsr);
11625 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11626 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11627 IEM_MC_IF_MXCSR_XCPT_PENDING()
11628 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11629 IEM_MC_ELSE()
11630 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11631 IEM_MC_ENDIF();
11632
11633 IEM_MC_ADVANCE_RIP_AND_FINISH();
11634 IEM_MC_END();
11635 }
11636}
11637
11638
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy - non-temporal store of a 32/64-bit GPR to memory.
       Requires SSE2; only the register->memory encoding is valid, everything
       else raises \#UD.  (IEM performs a plain store; the non-temporal hint
       is not modelled here.) */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The CPUID check is done after full decoding on purpose. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
11693
11694
11695/* Opcode 0x66 0x0f 0xc3 - invalid */
11696/* Opcode 0xf3 0x0f 0xc3 - invalid */
11697/* Opcode 0xf2 0x0f 0xc3 - invalid */
11698
11699
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    /* PINSRW (MMX form): insert a 16-bit word from a GPR or memory into the
       word of MMX register Pq selected by Ib.  Requires SSE or the AMD
       MMX extensions. */
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
        /* Writing an MMX register sets the high word of the x87 mapping. */
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11752
11753
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    /* PINSRW (SSE2 form): insert a 16-bit word from a GPR or memory into the
       word of XMM register Vdq selected by Ib. */
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11802
11803
11804/* Opcode 0xf3 0x0f 0xc4 - invalid */
11805/* Opcode 0xf2 0x0f 0xc4 - invalid */
11806
11807
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /* PEXTRW (MMX form): extract the word of MMX register Nq selected by Ib
       and zero-extend it into the 32-bit GPR Gd.  Register source only; the
       memory-encoded form raises \#UD. */
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
        /* The 16-bit result is zero-extended into the 32-bit destination. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
11838
11839
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    /* PEXTRW (SSE2 form): extract the word of XMM register Udq selected by Ib
       and zero-extend it into the 32-bit GPR Gd.  Register source only; the
       memory-encoded form raises \#UD. */
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
        /* The 16-bit result is zero-extended into the 32-bit destination. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
11869
11870
11871/* Opcode 0xf3 0x0f 0xc5 - invalid */
11872/* Opcode 0xf2 0x0f 0xc5 - invalid */
11873
11874
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    /* SHUFPS (SSE): shuffle packed single-precision values from Vps and
       Wps/mem128 into Vps according to the 2-bit lane selectors in Ib. */
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Memory operand must be 16-byte aligned or \#GP is raised. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11925
11926
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 * Shuffles packed double-precision values per the imm8 selector (SSE2). */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M bytes, so decode the effective address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Memory operand must be 16-byte aligned (SSE semantics, #GP otherwise). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11977
11978
11979/* Opcode 0xf3 0x0f 0xc6 - invalid */
11980/* Opcode 0xf2 0x0f 0xc6 - invalid */
11981
11982
/** Opcode 0x0f 0xc7 !11/1.
 * CMPXCHG8B m64: compares EDX:EAX with m64; on match stores ECX:EBX, else
 * loads m64 into EDX:EAX.  ZF reports the outcome. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    /* Map the destination read/write so the worker can do the conditional store. */
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Pack EDX:EAX into one 64-bit local (Lo=EAX, Hi=EDX). */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Likewise ECX:EBX (Lo=EBX, Hi=ECX). */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the memory value in u64EaxEdx;
       write it back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12025
12026
/** Opcode REX.W 0x0f 0xc7 !11/1.
 * CMPXCHG16B m128: compares RDX:RAX with m128; on match stores RCX:RBX, else
 * loads m128 into RDX:RAX.  Requires CPUID.CX16 and a 16-byte aligned operand. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* Unaligned operands raise #GP(0) regardless of LOCK prefix. */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Pack RDX:RAX into one 128-bit local (Lo=RAX, Hi=RDX). */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* Likewise RCX:RBX (Lo=RBX, Hi=RCX). */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        /* On AMD64 hosts use the native instruction only if the host has CX16. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not at all atomic, which works fine in a UNI CPU guest
                     configuration (ignoring DMA).  If guest SMP is active we have no choice
                     but to use a rendezvous callback here.  Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear) the memory value was left in u128RaxRdx; write it back. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();

        IEM_MC_END();
#endif
    }
    /* CPUID.CX16 not set for the guest -> #UD. */
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
12105
12106FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12107{
12108 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12109 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12110 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12111}
12112
12113
/** Opcode 0x0f 0xc7 11/6.
 * RDRAND r16/r32/r64: stores a hardware random value in the destination GPR,
 * CF signalling success.  #UD unless CPUID.RDRAND and a register operand. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        return IEMOP_RAISE_INVALID_OPCODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Uses the host RDRAND instruction when available, else a software fallback. */
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break; /* not reached; kept for the look of the thing */

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
                                         pu32Dst, pEFlags);

                /* 32-bit GPR writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12176
/** Opcode 0x0f 0xc7 !11/6.
 * VMPTRLD m64: loads the current-VMCS pointer; decoding only here, the real
 * work (incl. VMX checks) happens in iemCImpl_vmptrld. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Size/rep prefixes select other group-9 encodings, so they are invalid here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS; /* CIMPL call above returns; keeps compilers happy. */
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12197
/** Opcode 0x66 0x0f 0xc7 !11/6.
 * VMCLEAR m64: clears the referenced VMCS; decoding only, semantics in
 * iemCImpl_vmclear. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS; /* CIMPL call above returns; keeps compilers happy. */
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12218
/** Opcode 0xf3 0x0f 0xc7 !11/6.
 * VMXON m64: enters VMX operation; decoding only, semantics in iemCImpl_vmxon.
 * No IEMOP_HLP_IN_VMX_OPERATION here since VMXON is what enters it. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS; /* CIMPL call above returns; keeps compilers happy. */
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12238
/** Opcode [0xf3] 0x0f 0xc7 !11/7.
 * VMPTRST m64: stores the current-VMCS pointer; decoding only, semantics in
 * iemCImpl_vmptrst. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* Size/rep prefixes select other group-9 encodings, so they are invalid here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS; /* CIMPL call above returns; keeps compilers happy. */
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12259
/** Opcode 0x0f 0xc7 11/7.
 * RDSEED r16/r32/r64: stores a hardware entropy value in the destination GPR,
 * CF signalling success.  #UD unless CPUID.RDSEED and a register operand.
 * Mirrors iemOp_Grp9_rdrand_Rv. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        return IEMOP_RAISE_INVALID_OPCODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Uses the host RDSEED instruction when available, else a software fallback. */
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break; /* not reached; kept for the look of the thing */

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
                                         pu32Dst, pEFlags);

                /* 32-bit GPR writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12322
12323/**
12324 * Group 9 jump table for register variant.
12325 */
12326IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12327{ /* pfx: none, 066h, 0f3h, 0f2h */
12328 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12329 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12330 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12331 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12332 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12333 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12334 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12335 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12336};
12337AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12338
12339
12340/**
12341 * Group 9 jump table for memory variant.
12342 */
12343IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12344{ /* pfx: none, 066h, 0f3h, 0f2h */
12345 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12346 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12347 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12348 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12349 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12350 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12351 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12352 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12353};
12354AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12355
12356
12357/** Opcode 0x0f 0xc7. */
12358FNIEMOP_DEF(iemOp_Grp9)
12359{
12360 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12361 if (IEM_IS_MODRM_REG_MODE(bRm))
12362 /* register, register */
12363 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12364 + pVCpu->iem.s.idxPrefix], bRm);
12365 /* memory, register */
12366 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12367 + pVCpu->iem.s.idxPrefix], bRm);
12368}
12369
12370
12371/**
12372 * Common 'bswap register' helper.
12373 */
12374FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12375{
12376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12377 switch (pVCpu->iem.s.enmEffOpSize)
12378 {
12379 case IEMMODE_16BIT:
12380 IEM_MC_BEGIN(1, 0);
12381 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12382 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12383 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12384 IEM_MC_ADVANCE_RIP_AND_FINISH();
12385 IEM_MC_END();
12386 break;
12387
12388 case IEMMODE_32BIT:
12389 IEM_MC_BEGIN(1, 0);
12390 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12391 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12392 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12393 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12394 IEM_MC_ADVANCE_RIP_AND_FINISH();
12395 IEM_MC_END();
12396 break;
12397
12398 case IEMMODE_64BIT:
12399 IEM_MC_BEGIN(1, 0);
12400 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12401 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12402 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12403 IEM_MC_ADVANCE_RIP_AND_FINISH();
12404 IEM_MC_END();
12405 break;
12406
12407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12408 }
12409}
12410
12411
12412/** Opcode 0x0f 0xc8. */
12413FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12414{
12415 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12416 /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
12417 prefix. REX.B is the correct prefix it appears. For a parallel
12418 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12419 IEMOP_HLP_MIN_486();
12420 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12421}
12422
12423
12424/** Opcode 0x0f 0xc9. */
12425FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12426{
12427 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12428 IEMOP_HLP_MIN_486();
12429 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12430}
12431
12432
12433/** Opcode 0x0f 0xca. */
12434FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12435{
12436 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12437 IEMOP_HLP_MIN_486();
12438 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12439}
12440
12441
12442/** Opcode 0x0f 0xcb. */
12443FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12444{
12445 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12446 IEMOP_HLP_MIN_486();
12447 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12448}
12449
12450
/** Opcode 0x0f 0xcc.
 * BSWAP rSP/r12; REX.B selects r12, see iemOp_bswap_rAX_r8. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12458
12459
/** Opcode 0x0f 0xcd.
 * BSWAP rBP/r13; REX.B selects r13, see iemOp_bswap_rAX_r8. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12467
12468
/** Opcode 0x0f 0xce.
 * BSWAP rSI/r14; REX.B selects r14, see iemOp_bswap_rAX_r8. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12476
12477
/** Opcode 0x0f 0xcf.
 * BSWAP rDI/r15; REX.B selects r15, see iemOp_bswap_rAX_r8. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12485
12486
12487/* Opcode 0x0f 0xd0 - invalid */
12488
12489
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd
 * SSE3 packed double add/subtract; decode+dispatch via the common worker. */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
12496
12497
12498/* Opcode 0xf3 0x0f 0xd0 - invalid */
12499
12500
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps
 * SSE3 packed single add/subtract; decode+dispatch via the common worker. */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12507
12508
12509
12510/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12511FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12512{
12513 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12514 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12515}
12516
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx
 * SSE2 logical right shift of packed words. */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12523
12524/* Opcode 0xf3 0x0f 0xd1 - invalid */
12525/* Opcode 0xf2 0x0f 0xd1 - invalid */
12526
/** Opcode 0x0f 0xd2 - psrld Pq, Qq
 * MMX logical right shift of packed dwords. */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
12533
12534
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx
 * SSE2 logical right shift of packed dwords. */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12541
12542
12543/* Opcode 0xf3 0x0f 0xd2 - invalid */
12544/* Opcode 0xf2 0x0f 0xd2 - invalid */
12545
/** Opcode 0x0f 0xd3 - psrlq Pq, Qq
 * MMX logical right shift of packed qwords. */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}
12552
12553
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx
 * SSE2 logical right shift of packed qwords. */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12560
12561
12562/* Opcode 0xf3 0x0f 0xd3 - invalid */
12563/* Opcode 0xf2 0x0f 0xd3 - invalid */
12564
12565
/** Opcode 0x0f 0xd4 - paddq Pq, Qq
 * MMX qword add.  Note: PADDQ on MMX registers requires SSE2, hence the
 * _Ex worker with the fSse2 gate. */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
12572
12573
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx
 * SSE2 packed qword add. */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
12580
12581
12582/* Opcode 0xf3 0x0f 0xd4 - invalid */
12583/* Opcode 0xf2 0x0f 0xd4 - invalid */
12584
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq
 * MMX packed word multiply, low 16 bits of each product. */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
12591
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx
 * SSE2 packed word multiply, low 16 bits of each product. */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12598
12599
12600/* Opcode 0xf3 0x0f 0xd5 - invalid */
12601/* Opcode 0xf2 0x0f 0xd5 - invalid */
12602
12603/* Opcode 0x0f 0xd6 - invalid */
12604
12605/**
12606 * @opcode 0xd6
12607 * @oppfx 0x66
12608 * @opcpuid sse2
12609 * @opgroup og_sse2_pcksclr_datamove
12610 * @opxcpttype none
12611 * @optest op1=-1 op2=2 -> op1=2
12612 * @optest op1=0 op2=-42 -> op1=-42
12613 */
12614FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12615{
12616 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12618 if (IEM_IS_MODRM_REG_MODE(bRm))
12619 {
12620 /*
12621 * Register, register.
12622 */
12623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12624 IEM_MC_BEGIN(0, 2);
12625 IEM_MC_LOCAL(uint64_t, uSrc);
12626
12627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12629
12630 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12631 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12632
12633 IEM_MC_ADVANCE_RIP_AND_FINISH();
12634 IEM_MC_END();
12635 }
12636 else
12637 {
12638 /*
12639 * Memory, register.
12640 */
12641 IEM_MC_BEGIN(0, 2);
12642 IEM_MC_LOCAL(uint64_t, uSrc);
12643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12644
12645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12649
12650 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12651 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12652
12653 IEM_MC_ADVANCE_RIP_AND_FINISH();
12654 IEM_MC_END();
12655 }
12656}
12657
12658
12659/**
12660 * @opcode 0xd6
12661 * @opcodesub 11 mr/reg
12662 * @oppfx f3
12663 * @opcpuid sse2
12664 * @opgroup og_sse2_simdint_datamove
12665 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12666 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12667 */
12668FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12669{
12670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12671 if (IEM_IS_MODRM_REG_MODE(bRm))
12672 {
12673 /*
12674 * Register, register.
12675 */
12676 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12678 IEM_MC_BEGIN(0, 1);
12679 IEM_MC_LOCAL(uint64_t, uSrc);
12680
12681 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12682 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12683 IEM_MC_FPU_TO_MMX_MODE();
12684
12685 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12686 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12687
12688 IEM_MC_ADVANCE_RIP_AND_FINISH();
12689 IEM_MC_END();
12690 }
12691
12692 /**
12693 * @opdone
12694 * @opmnemonic udf30fd6mem
12695 * @opcode 0xd6
12696 * @opcodesub !11 mr/reg
12697 * @oppfx f3
12698 * @opunused intel-modrm
12699 * @opcpuid sse
12700 * @optest ->
12701 */
12702 else
12703 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12704}
12705
12706
12707/**
12708 * @opcode 0xd6
12709 * @opcodesub 11 mr/reg
12710 * @oppfx f2
12711 * @opcpuid sse2
12712 * @opgroup og_sse2_simdint_datamove
12713 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12714 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12715 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12716 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12717 * @optest op1=-42 op2=0xfedcba9876543210
12718 * -> op1=0xfedcba9876543210 ftw=0xff
12719 */
12720FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12721{
12722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12723 if (IEM_IS_MODRM_REG_MODE(bRm))
12724 {
12725 /*
12726 * Register, register.
12727 */
12728 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12730 IEM_MC_BEGIN(0, 1);
12731 IEM_MC_LOCAL(uint64_t, uSrc);
12732
12733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12734 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12735 IEM_MC_FPU_TO_MMX_MODE();
12736
12737 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12738 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12739
12740 IEM_MC_ADVANCE_RIP_AND_FINISH();
12741 IEM_MC_END();
12742 }
12743
12744 /**
12745 * @opdone
12746 * @opmnemonic udf20fd6mem
12747 * @opcode 0xd6
12748 * @opcodesub !11 mr/reg
12749 * @oppfx f2
12750 * @opunused intel-modrm
12751 * @opcpuid sse
12752 * @optest ->
12753 */
12754 else
12755 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12756}
12757
12758
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq
 * Collects the sign bits of the 8 bytes in an MMX register into a GPR. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Touching MMX registers switches the FPU to MMX mode (FTW=0xff). */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12786
12787
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux
 * Collects the sign bits of the 16 bytes in an XMM register into a GPR. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12812
12813
12814/* Opcode 0xf3 0x0f 0xd7 - invalid */
12815/* Opcode 0xf2 0x0f 0xd7 - invalid */
12816
12817
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq
 * MMX subtract of packed unsigned bytes with saturation. */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}
12824
12825
12826/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12827FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12828{
12829 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12830 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12831}
12832
12833
12834/* Opcode 0xf3 0x0f 0xd8 - invalid */
12835/* Opcode 0xf2 0x0f 0xd8 - invalid */
12836
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}
12851
12852
12853/* Opcode 0xf3 0x0f 0xd9 - invalid */
12854/* Opcode 0xf2 0x0f 0xd9 - invalid */
12855
/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    /* MMX-register form routed through the Mmx+Sse worker — the worker name
       suggests this encoding additionally requires SSE (MMXEXT); confirm in
       iemOpCommonMmxSse_FullFull_To_Full. */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
12870
12871/* Opcode 0xf3 0x0f 0xda - invalid */
12872/* Opcode 0xf2 0x0f 0xda - invalid */
12873
/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
12888
12889
12890/* Opcode 0xf3 0x0f 0xdb - invalid */
12891/* Opcode 0xf2 0x0f 0xdb - invalid */
12892
/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}
12907
12908
12909/* Opcode 0xf3 0x0f 0xdc - invalid */
12910/* Opcode 0xf2 0x0f 0xdc - invalid */
12911
/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
12926
12927
12928/* Opcode 0xf3 0x0f 0xdd - invalid */
12929/* Opcode 0xf2 0x0f 0xdd - invalid */
12930
/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    /* MMX-register form routed through the Mmx+Sse worker (like pminub);
       presumably the MMX encoding requires SSE/MMXEXT — see the worker. */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
12945
12946/* Opcode 0xf3 0x0f 0xde - invalid */
12947/* Opcode 0xf2 0x0f 0xde - invalid */
12948
12949
/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
12964
12965
12966/* Opcode 0xf3 0x0f 0xdf - invalid */
12967/* Opcode 0xf2 0x0f 0xdf - invalid */
12968
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    /* MMX-register form via the Mmx+Sse 'Opt' worker.
       NOTE(review): the SSE variant below carries DISOPTYPE_SSE but this one
       has no DISOPTYPE_MMX flag — possibly an omission, confirm against the
       other 0x0f 0xeX entries. */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
12983
12984
12985/* Opcode 0xf3 0x0f 0xe0 - invalid */
12986/* Opcode 0xf2 0x0f 0xe0 - invalid */
12987
/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    /* MMX variant via the 'Opt' flavour of the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
13002
13003
13004/* Opcode 0xf3 0x0f 0xe1 - invalid */
13005/* Opcode 0xf2 0x0f 0xe1 - invalid */
13006
/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    /* MMX variant via the 'Opt' flavour of the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
13021
13022
13023/* Opcode 0xf3 0x0f 0xe2 - invalid */
13024/* Opcode 0xf2 0x0f 0xe2 - invalid */
13025
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    /* MMX-register form via the Mmx+Sse 'Opt' worker (see pavgb above). */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
13040
13041
13042/* Opcode 0xf3 0x0f 0xe3 - invalid */
13043/* Opcode 0xf2 0x0f 0xe3 - invalid */
13044
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    /* MMX-register form via the Mmx+Sse 'Opt' worker. */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}
13059
13060
13061/* Opcode 0xf3 0x0f 0xe4 - invalid */
13062/* Opcode 0xf2 0x0f 0xe4 - invalid */
13063
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
13078
13079
13080/* Opcode 0xf3 0x0f 0xe5 - invalid */
13081/* Opcode 0xf2 0x0f 0xe5 - invalid */
13082/* Opcode 0x0f 0xe6 - invalid */
13083
13084
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    /* Truncating double -> dword conversion; routed through the common
       SSE2 floating-point full/full worker (MXCSR aware). */
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    /* Dword -> double conversion via the same common SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    /* Rounding double -> dword conversion via the same common SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13107
13108
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2   ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    /* Non-temporal store of an MMX register to memory; the register
       destination form raises \#UD (see the doxygen block below). */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* _8 register accessor: only bits 0-2 select an MMX register. */
        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
13155
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    /* Non-temporal store of an XMM register to (aligned) memory; the
       register destination form raises \#UD (see the doxygen block below). */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Alignment-checked store: misaligned accesses fault (\#GP). */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
13202
13203/* Opcode 0xf3 0x0f 0xe7 - invalid */
13204/* Opcode 0xf2 0x0f 0xe7 - invalid */
13205
13206
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}
13221
13222
13223/* Opcode 0xf3 0x0f 0xe8 - invalid */
13224/* Opcode 0xf2 0x0f 0xe8 - invalid */
13225
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13240
13241
13242/* Opcode 0xf3 0x0f 0xe9 - invalid */
13243/* Opcode 0xf2 0x0f 0xe9 - invalid */
13244
13245
/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    /* MMX-register form routed through the Mmx+Sse worker (see pminub). */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}
13260
13261
13262/* Opcode 0xf3 0x0f 0xea - invalid */
13263/* Opcode 0xf2 0x0f 0xea - invalid */
13264
13265
/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
13280
13281
13282/* Opcode 0xf3 0x0f 0xeb - invalid */
13283/* Opcode 0xf2 0x0f 0xeb - invalid */
13284
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}
13299
13300
13301/* Opcode 0xf3 0x0f 0xec - invalid */
13302/* Opcode 0xf2 0x0f 0xec - invalid */
13303
/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13318
13319
13320/* Opcode 0xf3 0x0f 0xed - invalid */
13321/* Opcode 0xf2 0x0f 0xed - invalid */
13322
13323
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    /* MMX-register form routed through the Mmx+Sse worker (see pminub). */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
13338
13339
13340/* Opcode 0xf3 0x0f 0xee - invalid */
13341/* Opcode 0xf2 0x0f 0xee - invalid */
13342
13343
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
13358
13359
13360/* Opcode 0xf3 0x0f 0xef - invalid */
13361/* Opcode 0xf2 0x0f 0xef - invalid */
13362
13363/* Opcode 0x0f 0xf0 - invalid */
13364/* Opcode 0x66 0x0f 0xf0 - invalid */
13365
13366
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    /* SSE3 LDDQU: unaligned 128-bit load into an XMM register (no alignment
       check on the fetch); memory operand only. */
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13399
13400
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    /* MMX variant via the 'Opt' flavour of the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
13415
13416
13417/* Opcode 0xf2 0x0f 0xf1 - invalid */
13418
/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    /* MMX variant via the 'Opt' flavour of the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}
13433
13434
13435/* Opcode 0xf2 0x0f 0xf2 - invalid */
13436
/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    /* MMX variant via the 'Opt' flavour of the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
13451
13452/* Opcode 0xf2 0x0f 0xf3 - invalid */
13453
/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    /* MMX-register form; flagged DISOPTYPE_SSE in the mnemonic (matching the
       0x66 variant below), executed via the common MMX worker. */
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
13468
13469
13470/* Opcode 0xf2 0x0f 0xf4 - invalid */
13471
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
13486
13487/* Opcode 0xf2 0x0f 0xf5 - invalid */
13488
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    /* MMX-register form via the Mmx+Sse 'Opt' worker (see pavgb). */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    /* SSE2 variant via the 'Opt' flavour of the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
13503
13504
13505/* Opcode 0xf2 0x0f 0xf6 - invalid */
13506
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq
 * @note Not yet implemented — FNIEMOP_STUB emits a placeholder handler. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq
 * @note Not yet implemented — FNIEMOP_STUB emits a placeholder handler. */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13511/* Opcode 0xf2 0x0f 0xf7 - invalid */
13512
13513
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}
13528
13529
13530/* Opcode 0xf2 0x0f 0xf8 - invalid */
13531
13532
/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}
13547
13548
13549/* Opcode 0xf2 0x0f 0xf9 - invalid */
13550
13551
/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}
13566
13567
13568/* Opcode 0xf2 0x0f 0xfa - invalid */
13569
13570
/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    /* MMX-register form, but gated on the SSE2 CPU feature via the _Ex
       worker — PSUBQ was introduced with SSE2 even in its MMX encoding. */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}
13585
13586
13587/* Opcode 0xf2 0x0f 0xfb - invalid */
13588
13589
/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}
13604
13605
13606/* Opcode 0xf2 0x0f 0xfc - invalid */
13607
13608
/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}
13623
13624
13625/* Opcode 0xf2 0x0f 0xfd - invalid */
13626
13627
/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    /* MMX variant: defers to the common mm1, mm2/mem64 worker. */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    /* SSE2 variant: 128-bit helper via the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}
13642
13643
13644/* Opcode 0xf2 0x0f 0xfe - invalid */
13645
13646
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    /* Guaranteed-invalid opcode.  On Intel CPUs UD0 takes a ModR/M byte
       (Intel SDM, UD instruction), so consume it — including any effective
       address bytes — to get the instruction length right before raising
       \#UD.  Other vendors treat 0x0f 0xff as a bare invalid opcode. */
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Decode (but don't use) the effective address so displacement/SIB
               bytes are consumed; propagate any decode failure. */
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
13667
13668
13669
13670/**
13671 * Two byte opcode map, first byte 0x0f.
13672 *
13673 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13674 * check if it needs updating as well when making changes.
13675 */
13676IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13677{
13678 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13679 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13680 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13681 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13682 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13683 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13684 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13685 /* 0x06 */ IEMOP_X4(iemOp_clts),
13686 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13687 /* 0x08 */ IEMOP_X4(iemOp_invd),
13688 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13689 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13690 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13691 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13692 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13693 /* 0x0e */ IEMOP_X4(iemOp_femms),
13694 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13695
13696 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13697 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13698 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13699 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13700 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13701 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13702 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13703 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13704 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13705 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13706 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13707 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13708 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13709 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13710 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13711 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13712
13713 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13714 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13715 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13716 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13717 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13718 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13719 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13720 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13721 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13722 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13723 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13724 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13725 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13726 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13727 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13728 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13729
13730 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13731 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13732 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13733 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13734 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13735 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13736 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13737 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13738 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13739 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13740 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13741 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13742 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13743 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13744 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13745 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13746
13747 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13748 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13749 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13750 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13751 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13752 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13753 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13754 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13755 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13756 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13757 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13758 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13759 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13760 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13761 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13762 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13763
13764 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13765 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13766 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13767 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13768 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13769 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13770 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13771 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13772 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13773 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13774 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13775 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13776 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13777 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13778 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13779 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13780
13781 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13782 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13783 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13784 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13785 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13786 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13787 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13788 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13789 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13790 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13791 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13792 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13793 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13794 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13795 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13796 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13797
13798 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13799 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13800 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13801 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13802 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13803 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13804 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13805 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13806
13807 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13808 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13809 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13810 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13811 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13812 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13813 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13814 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13815
13816 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13817 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13818 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13819 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13820 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13821 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13822 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13823 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13824 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13825 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13826 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13827 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13828 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13829 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13830 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13831 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13832
13833 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13834 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13835 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13836 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13837 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13838 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13839 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13840 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13841 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13842 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13843 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13844 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13845 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13846 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13847 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13848 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13849
13850 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13851 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13852 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13853 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13854 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13855 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13856 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13857 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13858 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13859 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13860 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13861 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13862 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13863 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13864 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13865 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13866
13867 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13868 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13869 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13870 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13871 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13872 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13873 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13874 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13875 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13876 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13877 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13878 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13879 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13880 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13881 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13882 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13883
13884 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13885 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13886 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13887 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13888 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13889 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13890 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13891 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13892 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13893 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13894 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13895 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13896 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13897 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13898 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13899 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13900
13901 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13902 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13903 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13904 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13905 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13906 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13907 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13908 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13909 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13910 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13911 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13912 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13913 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13914 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13915 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13916 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13917
13918 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13919 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13920 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13921 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13922 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13923 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13924 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13925 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13926 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13927 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13928 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13929 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13930 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13931 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13932 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13933 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13934
13935 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13936 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13937 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13938 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13939 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13940 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13941 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13942 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13943 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13944 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13945 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13946 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13947 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13948 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13949 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13950 /* 0xff */ IEMOP_X4(iemOp_ud0),
13951};
13952 AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024); /* 256 two-byte opcodes x 4 prefix columns (none, 0x66, 0xf3, 0xf2) per row above. */
13953
13954/** @} */
13955
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette