VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@100854

Last change on this file since 100854 was 100854, checked in by vboxsync, 19 months ago

VMM/IEM: In order to get rid of most impossible threaded functions, an IEM_MC_F_XXX parameter is added to IEM_MC_BEGIN that allows specifying if a block is only for 64-bit mode or 386+ or not for 286 or older. It can be extended with more info later, as needed. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 519.3 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 100854 2023-08-11 01:29:04Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
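
/*
 * Illustrative sketch (not taken from this revision): a concrete opcode
 * handler typically just forwards its instruction-specific assembly helper
 * to the worker above. The PAND wiring below follows the file's usual
 * conventions, but the mnemonic macro arguments and the iemAImpl_pand_u64
 * name are assumptions:
 *
 *     FNIEMOP_DEF(iemOp_pand_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
 *     }
 */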


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
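
/*
 * Illustrative sketch of the difference between the two worker-function
 * types used above. These are approximate shapes assumed from the IEM
 * conventions, not quoted from IEMInternal.h: the plain media type receives
 * the FPU/FXSAVE state pointer, while the "OPT" type takes only the
 * operands, which is why the first worker dispatches via
 * IEM_MC_CALL_MMX_AIMPL_2 and this one via IEM_MC_CALL_VOID_AIMPL_2.
 *
 *     typedef void FNIEMAIMPLMEDIAF2U64(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc);
 *     typedef void FNIEMAIMPLMEDIAOPTF2U64(uint64_t *puDst, uint64_t const *puSrc);
 */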


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
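
/*
 * Illustrative sketch: wiring an SSE-level instruction through the worker
 * above. All names here are placeholders, not from this file:
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXXX, pxxx, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxxx_u128);
 *     }
 */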


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
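
/*
 * Illustrative sketch: how the 0x66-prefixed (SSE2) form of an integer op
 * such as pand xmm1, xmm2/mem128 would reuse this worker. Assumed wiring,
 * following the file's conventions rather than quoting it:
 *
 *     FNIEMOP_DEF(iemOp_pand_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
 *     }
 */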


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
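
/*
 * Worked example of a "LowLow_To_Full" operation: punpcklbw interleaves the
 * low halves of the two operands, so with dst = 0x7766554433221100 and
 * src = 0xFFEEDDCCBBAA9988 the result is 0xBB33AA2299118800. Only the low
 * dword of the source matters, which is why the memory form above fetches
 * just 32 bits (zero-extended into the 64-bit local).
 */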


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 bits or the full 128 bits
 * are read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 bits or the full 128 bits
 * are read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
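
/*
 * Worked example of a "HighHigh_To_Full" operation: punpckhbw interleaves
 * the high halves, so with dst = 0x7766554433221100 and
 * src = 0xFFEEDDCCBBAA9988 the result is 0xFF77EE66DD55CC44. The source's
 * high dword is needed, hence the full 64-bit memory read above.
 */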


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits are read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
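
/*
 * Illustrative sketch: a packed single-precision arithmetic instruction
 * handing off to the worker above. Assumed wiring; the helper name follows
 * the file's conventions but is not quoted from it:
 *
 *     FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *     }
 */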


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
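
/*
 * Illustrative sketch: a scalar single-precision instruction such as addss
 * would forward an R32 helper here (assumed wiring, placeholder helper
 * name). Note that the memory form above uses IEM_MC_FETCH_MEM_R32, so
 * unlike the packed forms no 16-byte alignment check applies to the 32-bit
 * memory operand.
 *
 *     FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *     }
 */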


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits are read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
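
/*
 * Illustrative sketch: the SSE3 horizontal-add instructions are the typical
 * users of this worker (assumed wiring, placeholder helper name):
 *
 *     FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *     }
 */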


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
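
/*
 * Worked example of the group dispatch above: for the byte sequence
 * 0f 00 d0 the ModR/M byte is 0xd0, i.e. mod=3, reg=2, rm=0, so
 * g_apfnGroup6[2] routes to iemOp_Grp6_lldt, which in register mode loads
 * LDTR from AX.
 */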


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used; if a hypercall isn't
       handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the
     *        assumption here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used; if a hypercall isn't
       handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}
1603
1604/** Opcode 0x0f 0x01 0xda. */
1605#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1606FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1607{
1608 IEMOP_MNEMONIC(vmload, "vmload");
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1610 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
1611}
1612#else
1613FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1614#endif
1615
1616
1617/** Opcode 0x0f 0x01 0xdb. */
1618#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1619FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1620{
1621 IEMOP_MNEMONIC(vmsave, "vmsave");
1622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1623 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
1624}
1625#else
1626FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1627#endif
1628
1629
1630/** Opcode 0x0f 0x01 0xdc. */
1631#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1632FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1633{
1634 IEMOP_MNEMONIC(stgi, "stgi");
1635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1636 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
1637}
1638#else
1639FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1640#endif
1641
1642
1643/** Opcode 0x0f 0x01 0xdd. */
1644#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1645FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1646{
1647 IEMOP_MNEMONIC(clgi, "clgi");
1648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1649 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
1650}
1651#else
1652FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1653#endif
1654
1655
1656/** Opcode 0x0f 0x01 0xdf. */
1657#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1658FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1659{
1660 IEMOP_MNEMONIC(invlpga, "invlpga");
1661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1662 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
1663}
1664#else
1665FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1666#endif
1667
1668
1669/** Opcode 0x0f 0x01 0xde. */
1670#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1671FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1672{
1673 IEMOP_MNEMONIC(skinit, "skinit");
1674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
1676}
1677#else
1678FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1679#endif
1680
1681
1682/** Opcode 0x0f 0x01 /4. */
1683FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1684{
1685 IEMOP_MNEMONIC(smsw, "smsw");
1686 IEMOP_HLP_MIN_286();
1687 if (IEM_IS_MODRM_REG_MODE(bRm))
1688 {
1689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1690 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1691 }
1692
1693 /* Ignore operand size here, memory refs are always 16-bit. */
1694 IEM_MC_BEGIN(2, 0, 0);
1695 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1696 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1699 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1700 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1701 IEM_MC_END();
1702}
1703
1704
1705/** Opcode 0x0f 0x01 /6. */
1706FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1707{
1708 /* The operand size is effectively ignored; everything is 16-bit and only the
1709 lower four bits (CR0.PE, MP, EM and TS) are used. */
1710 IEMOP_MNEMONIC(lmsw, "lmsw");
1711 IEMOP_HLP_MIN_286();
1712 if (IEM_IS_MODRM_REG_MODE(bRm))
1713 {
1714 IEM_MC_BEGIN(2, 0, 0);
1715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1716 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1717 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1718 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1719 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1720 IEM_MC_END();
1721 }
1722 else
1723 {
1724 IEM_MC_BEGIN(2, 0, 0);
1725 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1726 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1729 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1730 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1731 IEM_MC_END();
1732 }
1733}
1734
1735
1736/** Opcode 0x0f 0x01 /7. */
1737FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1738{
1739 IEMOP_MNEMONIC(invlpg, "invlpg");
1740 IEMOP_HLP_MIN_486();
1741 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386);
1742 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1745 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1746 IEM_MC_END();
1747}
1748
1749
1750/** Opcode 0x0f 0x01 0xf8. */
1751FNIEMOP_DEF(iemOp_Grp7_swapgs)
1752{
1753 IEMOP_MNEMONIC(swapgs, "swapgs");
1754 IEMOP_HLP_ONLY_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1757}
1758
1759
1760/** Opcode 0x0f 0x01 0xf9. */
1761FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1762{
1763 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1765 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1766}
1767
1768
1769/**
1770 * Group 7 jump table, memory variant.
1771 */
1772IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1773{
1774 iemOp_Grp7_sgdt,
1775 iemOp_Grp7_sidt,
1776 iemOp_Grp7_lgdt,
1777 iemOp_Grp7_lidt,
1778 iemOp_Grp7_smsw,
1779 iemOp_InvalidWithRM,
1780 iemOp_Grp7_lmsw,
1781 iemOp_Grp7_invlpg
1782};
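/* Worked example (hypothetical byte sequence, for illustration): for
   0f 01 55 12 in 32-bit code, bRm is 0x55, i.e. mod=01 (memory form), reg=2
   and rm=5, so the dispatcher below calls g_apfnGroup7Mem[2] =
   iemOp_Grp7_lgdt with an effective address of [ebp+12h]. */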
1783
1784
1785/** Opcode 0x0f 0x01. */
1786FNIEMOP_DEF(iemOp_Grp7)
1787{
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 if (IEM_IS_MODRM_MEM_MODE(bRm))
1790 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1791
1792 switch (IEM_GET_MODRM_REG_8(bRm))
1793 {
1794 case 0:
1795 switch (IEM_GET_MODRM_RM_8(bRm))
1796 {
1797 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1798 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1799 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1800 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1801 }
1802 IEMOP_RAISE_INVALID_OPCODE_RET();
1803
1804 case 1:
1805 switch (IEM_GET_MODRM_RM_8(bRm))
1806 {
1807 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1808 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1809 }
1810 IEMOP_RAISE_INVALID_OPCODE_RET();
1811
1812 case 2:
1813 switch (IEM_GET_MODRM_RM_8(bRm))
1814 {
1815 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1816 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1817 }
1818 IEMOP_RAISE_INVALID_OPCODE_RET();
1819
1820 case 3:
1821 switch (IEM_GET_MODRM_RM_8(bRm))
1822 {
1823 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1824 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1825 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1826 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1827 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1828 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1829 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1830 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1832 }
1833
1834 case 4:
1835 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1836
1837 case 5:
1838 IEMOP_RAISE_INVALID_OPCODE_RET();
1839
1840 case 6:
1841 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1842
1843 case 7:
1844 switch (IEM_GET_MODRM_RM_8(bRm))
1845 {
1846 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1847 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1848 }
1849 IEMOP_RAISE_INVALID_OPCODE_RET();
1850
1851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1852 }
1853}
1854
1855 /** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
1856FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1857{
1858 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1860
1861 if (IEM_IS_MODRM_REG_MODE(bRm))
1862 {
1863 switch (pVCpu->iem.s.enmEffOpSize)
1864 {
1865 case IEMMODE_16BIT:
1866 IEM_MC_BEGIN(3, 0, 0);
1867 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1868 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1869 IEM_MC_ARG(uint16_t, u16Sel, 1);
1870 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1871
1872 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1873 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1874 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1875
1876 IEM_MC_END();
1877 break;
1878
1879 case IEMMODE_32BIT:
1880 case IEMMODE_64BIT:
1881 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
1882 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1883 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1884 IEM_MC_ARG(uint16_t, u16Sel, 1);
1885 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1886
1887 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1888 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1889 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1890
1891 IEM_MC_END();
1892 break;
1893
1894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1895 }
1896 }
1897 else
1898 {
1899 switch (pVCpu->iem.s.enmEffOpSize)
1900 {
1901 case IEMMODE_16BIT:
1902 IEM_MC_BEGIN(3, 1, 0);
1903 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1904 IEM_MC_ARG(uint16_t, u16Sel, 1);
1905 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1907
1908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1909 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1910
1911 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1912 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1913 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1914
1915 IEM_MC_END();
1916 break;
1917
1918 case IEMMODE_32BIT:
1919 case IEMMODE_64BIT:
1920 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
1921 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1922 IEM_MC_ARG(uint16_t, u16Sel, 1);
1923 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1928/** @todo testcase: make sure it's a 16-bit read. */
1929
1930 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1931 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1932 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1933
1934 IEM_MC_END();
1935 break;
1936
1937 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1938 }
1939 }
1940}
1941
1942
1943
1944/** Opcode 0x0f 0x02. */
1945FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1946{
1947 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1948 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1949}
1950
1951
1952/** Opcode 0x0f 0x03. */
1953FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1954{
1955 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1956 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1957}
1958
1959
1960/** Opcode 0x0f 0x05. */
1961FNIEMOP_DEF(iemOp_syscall)
1962{
1963 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1966 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1967 iemCImpl_syscall);
1968}
1969
1970
1971/** Opcode 0x0f 0x06. */
1972FNIEMOP_DEF(iemOp_clts)
1973{
1974 IEMOP_MNEMONIC(clts, "clts");
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1976 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1977}
1978
1979
1980/** Opcode 0x0f 0x07. */
1981FNIEMOP_DEF(iemOp_sysret)
1982{
1983 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1985 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1986 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1987 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1988}
1989
1990
1991/** Opcode 0x0f 0x08. */
1992FNIEMOP_DEF(iemOp_invd)
1993{
1994 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1995 IEMOP_HLP_MIN_486();
1996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1997 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
1998}
1999
2000
2001/** Opcode 0x0f 0x09. */
2002FNIEMOP_DEF(iemOp_wbinvd)
2003{
2004 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2005 IEMOP_HLP_MIN_486();
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
2008}
2009
2010
2011/** Opcode 0x0f 0x0b. */
2012FNIEMOP_DEF(iemOp_ud2)
2013{
2014 IEMOP_MNEMONIC(ud2, "ud2");
2015 IEMOP_RAISE_INVALID_OPCODE_RET();
2016}
2017
2018/** Opcode 0x0f 0x0d. */
2019FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2020{
2021 /* AMD prefetch group; Intel implements this as NOP Ev (and so do we). */
2022 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2023 {
2024 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2025 IEMOP_RAISE_INVALID_OPCODE_RET();
2026 }
2027
2028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2029 if (IEM_IS_MODRM_REG_MODE(bRm))
2030 {
2031 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2032 IEMOP_RAISE_INVALID_OPCODE_RET();
2033 }
2034
2035 switch (IEM_GET_MODRM_REG_8(bRm))
2036 {
2037 case 2: /* Aliased to /0 for the time being. */
2038 case 4: /* Aliased to /0 for the time being. */
2039 case 5: /* Aliased to /0 for the time being. */
2040 case 6: /* Aliased to /0 for the time being. */
2041 case 7: /* Aliased to /0 for the time being. */
2042 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2043 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2044 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2046 }
2047
2048 IEM_MC_BEGIN(0, 1, 0);
2049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 /* Currently a NOP. */
2053 NOREF(GCPtrEffSrc);
2054 IEM_MC_ADVANCE_RIP_AND_FINISH();
2055 IEM_MC_END();
2056}
2057
2058
2059/** Opcode 0x0f 0x0e. */
2060FNIEMOP_DEF(iemOp_femms)
2061{
2062 IEMOP_MNEMONIC(femms, "femms");
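    /* FEMMS is AMD's 3DNow! fast variant of EMMS: it may leave the x87/MMX
       register contents undefined, so the emulation only has to switch the
       FPU back out of MMX mode. */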
2063
2064 IEM_MC_BEGIN(0, 0, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2067 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2068 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2069 IEM_MC_FPU_FROM_MMX_MODE();
2070 IEM_MC_ADVANCE_RIP_AND_FINISH();
2071 IEM_MC_END();
2072}
2073
2074
2075/** Opcode 0x0f 0x0f. */
2076FNIEMOP_DEF(iemOp_3Dnow)
2077{
2078 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2079 {
2080 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2081 IEMOP_RAISE_INVALID_OPCODE_RET();
2082 }
2083
2084#ifdef IEM_WITH_3DNOW
2085 /* This is pretty sparse, so use a switch instead of a table. */
2086 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2087 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2088#else
2089 IEMOP_BITCH_ABOUT_STUB();
2090 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2091#endif
2092}
2093
2094
2095/**
2096 * @opcode 0x10
2097 * @oppfx none
2098 * @opcpuid sse
2099 * @opgroup og_sse_simdfp_datamove
2100 * @opxcpttype 4UA
2101 * @optest op1=1 op2=2 -> op1=2
2102 * @optest op1=0 op2=-22 -> op1=-22
2103 */
2104FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2105{
2106 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2108 if (IEM_IS_MODRM_REG_MODE(bRm))
2109 {
2110 /*
2111 * XMM128, XMM128.
2112 */
2113 IEM_MC_BEGIN(0, 0, 0);
2114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2115 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2117 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2118 IEM_GET_MODRM_RM(pVCpu, bRm));
2119 IEM_MC_ADVANCE_RIP_AND_FINISH();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * XMM128, [mem128].
2126 */
2127 IEM_MC_BEGIN(0, 2, 0);
2128 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP_AND_FINISH();
2140 IEM_MC_END();
2141 }
2142
2143}
2144
2145
2146/**
2147 * @opcode 0x10
2148 * @oppfx 0x66
2149 * @opcpuid sse2
2150 * @opgroup og_sse2_pcksclr_datamove
2151 * @opxcpttype 4UA
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
2154 */
2155FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2156{
2157 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2159 if (IEM_IS_MODRM_REG_MODE(bRm))
2160 {
2161 /*
2162 * XMM128, XMM128.
2163 */
2164 IEM_MC_BEGIN(0, 0, 0);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2166 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2168 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2169 IEM_GET_MODRM_RM(pVCpu, bRm));
2170 IEM_MC_ADVANCE_RIP_AND_FINISH();
2171 IEM_MC_END();
2172 }
2173 else
2174 {
2175 /*
2176 * XMM128, [mem128].
2177 */
2178 IEM_MC_BEGIN(0, 2, 0);
2179 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2181
2182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2186
2187 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2188 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2189
2190 IEM_MC_ADVANCE_RIP_AND_FINISH();
2191 IEM_MC_END();
2192 }
2193}
2194
2195
2196/**
2197 * @opcode 0x10
2198 * @oppfx 0xf3
2199 * @opcpuid sse
2200 * @opgroup og_sse_simdfp_datamove
2201 * @opxcpttype 5
2202 * @optest op1=1 op2=2 -> op1=2
2203 * @optest op1=0 op2=-22 -> op1=-22
2204 */
2205FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2206{
2207 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2209 if (IEM_IS_MODRM_REG_MODE(bRm))
2210 {
2211 /*
2212 * XMM32, XMM32.
2213 */
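         /* Note: the register form only replaces the low dword of the
            destination and leaves bits 127:32 untouched, whereas the memory
            form below zero-extends the loaded dword through bit 127; the
            MOVSD handler further down mirrors this with qwords. */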
2214 IEM_MC_BEGIN(0, 1, 0);
2215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2216 IEM_MC_LOCAL(uint32_t, uSrc);
2217
2218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2219 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2220 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2221 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2222
2223 IEM_MC_ADVANCE_RIP_AND_FINISH();
2224 IEM_MC_END();
2225 }
2226 else
2227 {
2228 /*
2229 * XMM128, [mem32].
2230 */
2231 IEM_MC_BEGIN(0, 2, 0);
2232 IEM_MC_LOCAL(uint32_t, uSrc);
2233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2234
2235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2239
2240 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2241 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2242
2243 IEM_MC_ADVANCE_RIP_AND_FINISH();
2244 IEM_MC_END();
2245 }
2246}
2247
2248
2249/**
2250 * @opcode 0x10
2251 * @oppfx 0xf2
2252 * @opcpuid sse2
2253 * @opgroup og_sse2_pcksclr_datamove
2254 * @opxcpttype 5
2255 * @optest op1=1 op2=2 -> op1=2
2256 * @optest op1=0 op2=-42 -> op1=-42
2257 */
2258FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2259{
2260 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 if (IEM_IS_MODRM_REG_MODE(bRm))
2263 {
2264 /*
2265 * XMM64, XMM64.
2266 */
2267 IEM_MC_BEGIN(0, 1, 0);
2268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2269 IEM_MC_LOCAL(uint64_t, uSrc);
2270
2271 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2273 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2274 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2275
2276 IEM_MC_ADVANCE_RIP_AND_FINISH();
2277 IEM_MC_END();
2278 }
2279 else
2280 {
2281 /*
2282 * XMM128, [mem64].
2283 */
2284 IEM_MC_BEGIN(0, 2, 0);
2285 IEM_MC_LOCAL(uint64_t, uSrc);
2286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2287
2288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2290 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2292
2293 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2294 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2295
2296 IEM_MC_ADVANCE_RIP_AND_FINISH();
2297 IEM_MC_END();
2298 }
2299}
2300
2301
2302/**
2303 * @opcode 0x11
2304 * @oppfx none
2305 * @opcpuid sse
2306 * @opgroup og_sse_simdfp_datamove
2307 * @opxcpttype 4UA
2308 * @optest op1=1 op2=2 -> op1=2
2309 * @optest op1=0 op2=-42 -> op1=-42
2310 */
2311FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2312{
2313 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2315 if (IEM_IS_MODRM_REG_MODE(bRm))
2316 {
2317 /*
2318 * XMM128, XMM128.
2319 */
2320 IEM_MC_BEGIN(0, 0, 0);
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2323 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2324 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2325 IEM_GET_MODRM_REG(pVCpu, bRm));
2326 IEM_MC_ADVANCE_RIP_AND_FINISH();
2327 IEM_MC_END();
2328 }
2329 else
2330 {
2331 /*
2332 * [mem128], XMM128.
2333 */
2334 IEM_MC_BEGIN(0, 2, 0);
2335 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2337
2338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2342
2343 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2344 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2345
2346 IEM_MC_ADVANCE_RIP_AND_FINISH();
2347 IEM_MC_END();
2348 }
2349}
2350
2351
2352/**
2353 * @opcode 0x11
2354 * @oppfx 0x66
2355 * @opcpuid sse2
2356 * @opgroup og_sse2_pcksclr_datamove
2357 * @opxcpttype 4UA
2358 * @optest op1=1 op2=2 -> op1=2
2359 * @optest op1=0 op2=-42 -> op1=-42
2360 */
2361FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2362{
2363 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2365 if (IEM_IS_MODRM_REG_MODE(bRm))
2366 {
2367 /*
2368 * XMM128, XMM128.
2369 */
2370 IEM_MC_BEGIN(0, 0, 0);
2371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2374 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2375 IEM_GET_MODRM_REG(pVCpu, bRm));
2376 IEM_MC_ADVANCE_RIP_AND_FINISH();
2377 IEM_MC_END();
2378 }
2379 else
2380 {
2381 /*
2382 * [mem128], XMM128.
2383 */
2384 IEM_MC_BEGIN(0, 2, 0);
2385 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2392
2393 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2394 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2395
2396 IEM_MC_ADVANCE_RIP_AND_FINISH();
2397 IEM_MC_END();
2398 }
2399}
2400
2401
2402/**
2403 * @opcode 0x11
2404 * @oppfx 0xf3
2405 * @opcpuid sse
2406 * @opgroup og_sse_simdfp_datamove
2407 * @opxcpttype 5
2408 * @optest op1=1 op2=2 -> op1=2
2409 * @optest op1=0 op2=-22 -> op1=-22
2410 */
2411FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2412{
2413 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2415 if (IEM_IS_MODRM_REG_MODE(bRm))
2416 {
2417 /*
2418 * XMM32, XMM32.
2419 */
2420 IEM_MC_BEGIN(0, 1, 0);
2421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2422 IEM_MC_LOCAL(uint32_t, uSrc);
2423
2424 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2425 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2426 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2427 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2428
2429 IEM_MC_ADVANCE_RIP_AND_FINISH();
2430 IEM_MC_END();
2431 }
2432 else
2433 {
2434 /*
2435 * [mem32], XMM32.
2436 */
2437 IEM_MC_BEGIN(0, 2, 0);
2438 IEM_MC_LOCAL(uint32_t, uSrc);
2439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2440
2441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2443 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2444 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2445
2446 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2447 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2448
2449 IEM_MC_ADVANCE_RIP_AND_FINISH();
2450 IEM_MC_END();
2451 }
2452}
2453
2454
2455/**
2456 * @opcode 0x11
2457 * @oppfx 0xf2
2458 * @opcpuid sse2
2459 * @opgroup og_sse2_pcksclr_datamove
2460 * @opxcpttype 5
2461 * @optest op1=1 op2=2 -> op1=2
2462 * @optest op1=0 op2=-42 -> op1=-42
2463 */
2464FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2465{
2466 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2468 if (IEM_IS_MODRM_REG_MODE(bRm))
2469 {
2470 /*
2471 * XMM64, XMM64.
2472 */
2473 IEM_MC_BEGIN(0, 1, 0);
2474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2475 IEM_MC_LOCAL(uint64_t, uSrc);
2476
2477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2478 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2479 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2480 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2481
2482 IEM_MC_ADVANCE_RIP_AND_FINISH();
2483 IEM_MC_END();
2484 }
2485 else
2486 {
2487 /*
2488 * [mem64], XMM64.
2489 */
2490 IEM_MC_BEGIN(0, 2, 0);
2491 IEM_MC_LOCAL(uint64_t, uSrc);
2492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2493
2494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2496 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2498
2499 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2501
2502 IEM_MC_ADVANCE_RIP_AND_FINISH();
2503 IEM_MC_END();
2504 }
2505}
2506
2507
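/**
 * Opcode 0x0f 0x12 encodes two instructions: MOVHLPS for the register form
 * and MOVLPS for the memory form, selected by the ModRM mod field (see the
 * doxygen blocks inside the function).
 */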
2508FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2509{
2510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2511 if (IEM_IS_MODRM_REG_MODE(bRm))
2512 {
2513 /**
2514 * @opcode 0x12
2515 * @opcodesub 11 mr/reg
2516 * @oppfx none
2517 * @opcpuid sse
2518 * @opgroup og_sse_simdfp_datamove
2519 * @opxcpttype 5
2520 * @optest op1=1 op2=2 -> op1=2
2521 * @optest op1=0 op2=-42 -> op1=-42
2522 */
2523 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2524
2525 IEM_MC_BEGIN(0, 1, 0);
2526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2527 IEM_MC_LOCAL(uint64_t, uSrc);
2528
2529 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2531 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2532 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2533
2534 IEM_MC_ADVANCE_RIP_AND_FINISH();
2535 IEM_MC_END();
2536 }
2537 else
2538 {
2539 /**
2540 * @opdone
2541 * @opcode 0x12
2542 * @opcodesub !11 mr/reg
2543 * @oppfx none
2544 * @opcpuid sse
2545 * @opgroup og_sse_simdfp_datamove
2546 * @opxcpttype 5
2547 * @optest op1=1 op2=2 -> op1=2
2548 * @optest op1=0 op2=-42 -> op1=-42
2549 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2550 */
2551 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2552
2553 IEM_MC_BEGIN(0, 2, 0);
2554 IEM_MC_LOCAL(uint64_t, uSrc);
2555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2556
2557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2559 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2561
2562 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2564
2565 IEM_MC_ADVANCE_RIP_AND_FINISH();
2566 IEM_MC_END();
2567 }
2568}
2569
2570
2571/**
2572 * @opcode 0x12
2573 * @opcodesub !11 mr/reg
2574 * @oppfx 0x66
2575 * @opcpuid sse2
2576 * @opgroup og_sse2_pcksclr_datamove
2577 * @opxcpttype 5
2578 * @optest op1=1 op2=2 -> op1=2
2579 * @optest op1=0 op2=-42 -> op1=-42
2580 */
2581FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 if (IEM_IS_MODRM_MEM_MODE(bRm))
2585 {
2586 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2587
2588 IEM_MC_BEGIN(0, 2, 0);
2589 IEM_MC_LOCAL(uint64_t, uSrc);
2590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2591
2592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2594 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2596
2597 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2598 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2599
2600 IEM_MC_ADVANCE_RIP_AND_FINISH();
2601 IEM_MC_END();
2602 }
2603
2604 /**
2605 * @opdone
2606 * @opmnemonic ud660f12m3
2607 * @opcode 0x12
2608 * @opcodesub 11 mr/reg
2609 * @oppfx 0x66
2610 * @opunused immediate
2611 * @opcpuid sse
2612 * @optest ->
2613 */
2614 else
2615 IEMOP_RAISE_INVALID_OPCODE_RET();
2616}
2617
2618
2619/**
2620 * @opcode 0x12
2621 * @oppfx 0xf3
2622 * @opcpuid sse3
2623 * @opgroup og_sse3_pcksclr_datamove
2624 * @opxcpttype 4
2625 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2626 * op1=0x00000002000000020000000100000001
2627 */
2628FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2629{
2630 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
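    /* MOVSLDUP duplicates the even dwords of the source:
       result = { src0, src0, src2, src2 }, matching the four
       IEM_MC_STORE_XREG_U32_U128 invocations below. */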
2631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2632 if (IEM_IS_MODRM_REG_MODE(bRm))
2633 {
2634 /*
2635 * XMM, XMM.
2636 */
2637 IEM_MC_BEGIN(0, 1, 0);
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2639 IEM_MC_LOCAL(RTUINT128U, uSrc);
2640
2641 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2642 IEM_MC_PREPARE_SSE_USAGE();
2643
2644 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2645 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2646 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2647 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2648 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2649
2650 IEM_MC_ADVANCE_RIP_AND_FINISH();
2651 IEM_MC_END();
2652 }
2653 else
2654 {
2655 /*
2656 * XMM, [mem128].
2657 */
2658 IEM_MC_BEGIN(0, 2, 0);
2659 IEM_MC_LOCAL(RTUINT128U, uSrc);
2660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2661
2662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2665 IEM_MC_PREPARE_SSE_USAGE();
2666
2667 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2668 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2669 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2670 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2671 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2672
2673 IEM_MC_ADVANCE_RIP_AND_FINISH();
2674 IEM_MC_END();
2675 }
2676}
2677
2678
2679/**
2680 * @opcode 0x12
2681 * @oppfx 0xf2
2682 * @opcpuid sse3
2683 * @opgroup og_sse3_pcksclr_datamove
2684 * @opxcpttype 5
2685 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2686 * op1=0x22222222111111112222222211111111
2687 */
2688FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2689{
2690 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
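    /* MOVDDUP broadcasts the low qword of the source: result = { src0, src0 },
       hence the IEM_MC_STORE_XREG_U64 + IEM_MC_STORE_XREG_HI_U64 pairs below. */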
2691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2692 if (IEM_IS_MODRM_REG_MODE(bRm))
2693 {
2694 /*
2695 * XMM128, XMM64.
2696 */
2697 IEM_MC_BEGIN(1, 0, 0);
2698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2699 IEM_MC_ARG(uint64_t, uSrc, 0);
2700
2701 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2702 IEM_MC_PREPARE_SSE_USAGE();
2703
2704 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2705 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2706 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2707
2708 IEM_MC_ADVANCE_RIP_AND_FINISH();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * XMM128, [mem64].
2715 */
2716 IEM_MC_BEGIN(1, 1, 0);
2717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2718 IEM_MC_ARG(uint64_t, uSrc, 0);
2719
2720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2722 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2723 IEM_MC_PREPARE_SSE_USAGE();
2724
2725 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2727 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2728
2729 IEM_MC_ADVANCE_RIP_AND_FINISH();
2730 IEM_MC_END();
2731 }
2732}
2733
2734
2735/**
2736 * @opcode 0x13
2737 * @opcodesub !11 mr/reg
2738 * @oppfx none
2739 * @opcpuid sse
2740 * @opgroup og_sse_simdfp_datamove
2741 * @opxcpttype 5
2742 * @optest op1=1 op2=2 -> op1=2
2743 * @optest op1=0 op2=-42 -> op1=-42
2744 */
2745FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2746{
2747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2748 if (IEM_IS_MODRM_MEM_MODE(bRm))
2749 {
2750 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2751
2752 IEM_MC_BEGIN(0, 2, 0);
2753 IEM_MC_LOCAL(uint64_t, uSrc);
2754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2755
2756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2759 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2760
2761 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2762 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2763
2764 IEM_MC_ADVANCE_RIP_AND_FINISH();
2765 IEM_MC_END();
2766 }
2767
2768 /**
2769 * @opdone
2770 * @opmnemonic ud0f13m3
2771 * @opcode 0x13
2772 * @opcodesub 11 mr/reg
2773 * @oppfx none
2774 * @opunused immediate
2775 * @opcpuid sse
2776 * @optest ->
2777 */
2778 else
2779 IEMOP_RAISE_INVALID_OPCODE_RET();
2780}
2781
2782
2783/**
2784 * @opcode 0x13
2785 * @opcodesub !11 mr/reg
2786 * @oppfx 0x66
2787 * @opcpuid sse2
2788 * @opgroup og_sse2_pcksclr_datamove
2789 * @opxcpttype 5
2790 * @optest op1=1 op2=2 -> op1=2
2791 * @optest op1=0 op2=-42 -> op1=-42
2792 */
2793FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2794{
2795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2796 if (IEM_IS_MODRM_MEM_MODE(bRm))
2797 {
2798 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2799
2800 IEM_MC_BEGIN(0, 2, 0);
2801 IEM_MC_LOCAL(uint64_t, uSrc);
2802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2803
2804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2806 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2807 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2808
2809 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2810 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2811
2812 IEM_MC_ADVANCE_RIP_AND_FINISH();
2813 IEM_MC_END();
2814 }
2815
2816 /**
2817 * @opdone
2818 * @opmnemonic ud660f13m3
2819 * @opcode 0x13
2820 * @opcodesub 11 mr/reg
2821 * @oppfx 0x66
2822 * @opunused immediate
2823 * @opcpuid sse
2824 * @optest ->
2825 */
2826 else
2827 IEMOP_RAISE_INVALID_OPCODE_RET();
2828}
2829
2830
2831/**
2832 * @opmnemonic udf30f13
2833 * @opcode 0x13
2834 * @oppfx 0xf3
2835 * @opunused intel-modrm
2836 * @opcpuid sse
2837 * @optest ->
2838 * @opdone
2839 */
2840
2841/**
2842 * @opmnemonic udf20f13
2843 * @opcode 0x13
2844 * @oppfx 0xf2
2845 * @opunused intel-modrm
2846 * @opcpuid sse
2847 * @optest ->
2848 * @opdone
2849 */
2850
2851/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2852FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2853{
2854 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2855 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2856}
2857
2858
2859/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2860FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2861{
2862 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2863 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2864}
2865
2866
2867/**
2868 * @opdone
2869 * @opmnemonic udf30f14
2870 * @opcode 0x14
2871 * @oppfx 0xf3
2872 * @opunused intel-modrm
2873 * @opcpuid sse
2874 * @optest ->
2875 * @opdone
2876 */
2877
2878/**
2879 * @opmnemonic udf20f14
2880 * @opcode 0x14
2881 * @oppfx 0xf2
2882 * @opunused intel-modrm
2883 * @opcpuid sse
2884 * @optest ->
2885 * @opdone
2886 */
2887
2888/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2889FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2890{
2891 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2892 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2893}
2894
2895
2896/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2897FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2898{
2899 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2900 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2901}
2902
2903
2904/* Opcode 0xf3 0x0f 0x15 - invalid */
2905/* Opcode 0xf2 0x0f 0x15 - invalid */
2906
2907/**
2908 * @opdone
2909 * @opmnemonic udf30f15
2910 * @opcode 0x15
2911 * @oppfx 0xf3
2912 * @opunused intel-modrm
2913 * @opcpuid sse
2914 * @optest ->
2915 * @opdone
2916 */
2917
2918/**
2919 * @opmnemonic udf20f15
2920 * @opcode 0x15
2921 * @oppfx 0xf2
2922 * @opunused intel-modrm
2923 * @opcpuid sse
2924 * @optest ->
2925 * @opdone
2926 */
2927
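/**
 * Opcode 0x0f 0x16 likewise encodes two instructions: MOVLHPS for the
 * register form and MOVHPS for the memory form, selected by the ModRM mod
 * field (see the doxygen blocks inside the function).
 */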
2928FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2929{
2930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2931 if (IEM_IS_MODRM_REG_MODE(bRm))
2932 {
2933 /**
2934 * @opcode 0x16
2935 * @opcodesub 11 mr/reg
2936 * @oppfx none
2937 * @opcpuid sse
2938 * @opgroup og_sse_simdfp_datamove
2939 * @opxcpttype 5
2940 * @optest op1=1 op2=2 -> op1=2
2941 * @optest op1=0 op2=-42 -> op1=-42
2942 */
2943 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2944
2945 IEM_MC_BEGIN(0, 1, 0);
2946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2947 IEM_MC_LOCAL(uint64_t, uSrc);
2948
2949 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2950 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2951 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2952 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2953
2954 IEM_MC_ADVANCE_RIP_AND_FINISH();
2955 IEM_MC_END();
2956 }
2957 else
2958 {
2959 /**
2960 * @opdone
2961 * @opcode 0x16
2962 * @opcodesub !11 mr/reg
2963 * @oppfx none
2964 * @opcpuid sse
2965 * @opgroup og_sse_simdfp_datamove
2966 * @opxcpttype 5
2967 * @optest op1=1 op2=2 -> op1=2
2968 * @optest op1=0 op2=-42 -> op1=-42
2969 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2970 */
2971 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2972
2973 IEM_MC_BEGIN(0, 2, 0);
2974 IEM_MC_LOCAL(uint64_t, uSrc);
2975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2976
2977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2979 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2980 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2981
2982 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2983 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2984
2985 IEM_MC_ADVANCE_RIP_AND_FINISH();
2986 IEM_MC_END();
2987 }
2988}
2989
2990
2991/**
2992 * @opcode 0x16
2993 * @opcodesub !11 mr/reg
2994 * @oppfx 0x66
2995 * @opcpuid sse2
2996 * @opgroup og_sse2_pcksclr_datamove
2997 * @opxcpttype 5
2998 * @optest op1=1 op2=2 -> op1=2
2999 * @optest op1=0 op2=-42 -> op1=-42
3000 */
3001FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if (IEM_IS_MODRM_MEM_MODE(bRm))
3005 {
3006 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3007
3008 IEM_MC_BEGIN(0, 2, 0);
3009 IEM_MC_LOCAL(uint64_t, uSrc);
3010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3011
3012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3014 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3015 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3016
3017 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3018 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3019
3020 IEM_MC_ADVANCE_RIP_AND_FINISH();
3021 IEM_MC_END();
3022 }
3023
3024 /**
3025 * @opdone
3026 * @opmnemonic ud660f16m3
3027 * @opcode 0x16
3028 * @opcodesub 11 mr/reg
3029 * @oppfx 0x66
3030 * @opunused immediate
3031 * @opcpuid sse
3032 * @optest ->
3033 */
3034 else
3035 IEMOP_RAISE_INVALID_OPCODE_RET();
3036}
3037
3038
3039/**
3040 * @opcode 0x16
3041 * @oppfx 0xf3
3042 * @opcpuid sse3
3043 * @opgroup og_sse3_pcksclr_datamove
3044 * @opxcpttype 4
3045 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3046 * op1=0x00000002000000020000000100000001
3047 */
3048FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3049{
3050 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
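    /* MOVSHDUP duplicates the odd dwords of the source:
       result = { src1, src1, src3, src3 }. */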
3051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3052 if (IEM_IS_MODRM_REG_MODE(bRm))
3053 {
3054 /*
3055 * XMM128, XMM128.
3056 */
3057 IEM_MC_BEGIN(0, 1, 0);
3058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3059 IEM_MC_LOCAL(RTUINT128U, uSrc);
3060
3061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3062 IEM_MC_PREPARE_SSE_USAGE();
3063
3064 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3065 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3066 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3067 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3068 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3069
3070 IEM_MC_ADVANCE_RIP_AND_FINISH();
3071 IEM_MC_END();
3072 }
3073 else
3074 {
3075 /*
3076 * XMM128, [mem128].
3077 */
3078 IEM_MC_BEGIN(0, 2, 0);
3079 IEM_MC_LOCAL(RTUINT128U, uSrc);
3080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3081
3082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3085 IEM_MC_PREPARE_SSE_USAGE();
3086
3087 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3088 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3089 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3090 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3091 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3092
3093 IEM_MC_ADVANCE_RIP_AND_FINISH();
3094 IEM_MC_END();
3095 }
3096}
3097
3098/**
3099 * @opdone
3100 * @opmnemonic udf20f16
3101 * @opcode 0x16
3102 * @oppfx 0xf2
3103 * @opunused intel-modrm
3104 * @opcpuid sse
3105 * @optest ->
3106 * @opdone
3107 */
3108
3109
3110/**
3111 * @opcode 0x17
3112 * @opcodesub !11 mr/reg
3113 * @oppfx none
3114 * @opcpuid sse
3115 * @opgroup og_sse_simdfp_datamove
3116 * @opxcpttype 5
3117 * @optest op1=1 op2=2 -> op1=2
3118 * @optest op1=0 op2=-42 -> op1=-42
3119 */
3120FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3121{
3122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3123 if (IEM_IS_MODRM_MEM_MODE(bRm))
3124 {
3125 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3126
3127 IEM_MC_BEGIN(0, 2, 0);
3128 IEM_MC_LOCAL(uint64_t, uSrc);
3129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3130
3131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3135
3136 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3137 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3138
3139 IEM_MC_ADVANCE_RIP_AND_FINISH();
3140 IEM_MC_END();
3141 }
3142
3143 /**
3144 * @opdone
3145 * @opmnemonic ud0f17m3
3146 * @opcode 0x17
3147 * @opcodesub 11 mr/reg
3148 * @oppfx none
3149 * @opunused immediate
3150 * @opcpuid sse
3151 * @optest ->
3152 */
3153 else
3154 IEMOP_RAISE_INVALID_OPCODE_RET();
3155}
3156
3157
3158/**
3159 * @opcode 0x17
3160 * @opcodesub !11 mr/reg
3161 * @oppfx 0x66
3162 * @opcpuid sse2
3163 * @opgroup og_sse2_pcksclr_datamove
3164 * @opxcpttype 5
3165 * @optest op1=1 op2=2 -> op1=2
3166 * @optest op1=0 op2=-42 -> op1=-42
3167 */
3168FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3169{
3170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3171 if (IEM_IS_MODRM_MEM_MODE(bRm))
3172 {
3173 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3174
3175 IEM_MC_BEGIN(0, 2, 0);
3176 IEM_MC_LOCAL(uint64_t, uSrc);
3177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3178
3179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3181 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3182 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3183
3184 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3185 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3186
3187 IEM_MC_ADVANCE_RIP_AND_FINISH();
3188 IEM_MC_END();
3189 }
3190
3191 /**
3192 * @opdone
3193 * @opmnemonic ud660f17m3
3194 * @opcode 0x17
3195 * @opcodesub 11 mr/reg
3196 * @oppfx 0x66
3197 * @opunused immediate
3198 * @opcpuid sse
3199 * @optest ->
3200 */
3201 else
3202 IEMOP_RAISE_INVALID_OPCODE_RET();
3203}
3204
3205
3206/**
3207 * @opdone
3208 * @opmnemonic udf30f17
3209 * @opcode 0x17
3210 * @oppfx 0xf3
3211 * @opunused intel-modrm
3212 * @opcpuid sse
3213 * @optest ->
3214 * @opdone
3215 */
3216
3217/**
3218 * @opmnemonic udf20f17
3219 * @opcode 0x17
3220 * @oppfx 0xf2
3221 * @opunused intel-modrm
3222 * @opcpuid sse
3223 * @optest ->
3224 * @opdone
3225 */
3226
3227
3228/** Opcode 0x0f 0x18. */
3229FNIEMOP_DEF(iemOp_prefetch_Grp16)
3230{
3231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3232 if (IEM_IS_MODRM_MEM_MODE(bRm))
3233 {
3234 switch (IEM_GET_MODRM_REG_8(bRm))
3235 {
3236 case 4: /* Aliased to /0 for the time being according to AMD. */
3237 case 5: /* Aliased to /0 for the time being according to AMD. */
3238 case 6: /* Aliased to /0 for the time being according to AMD. */
3239 case 7: /* Aliased to /0 for the time being according to AMD. */
3240 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3241 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3242 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3243 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3245 }
3246
3247 IEM_MC_BEGIN(0, 1, 0);
3248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3251 /* Currently a NOP. */
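            /* (Architecturally the prefetch hints have no guaranteed effect,
               so implementing them as NOPs is conforming.) */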
3252 NOREF(GCPtrEffSrc);
3253 IEM_MC_ADVANCE_RIP_AND_FINISH();
3254 IEM_MC_END();
3255 }
3256 else
3257 IEMOP_RAISE_INVALID_OPCODE_RET();
3258}
3259
3260
3261/** Opcode 0x0f 0x19..0x1f. */
3262FNIEMOP_DEF(iemOp_nop_Ev)
3263{
3264 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3266 if (IEM_IS_MODRM_REG_MODE(bRm))
3267 {
3268 IEM_MC_BEGIN(0, 0, 0);
3269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3270 IEM_MC_ADVANCE_RIP_AND_FINISH();
3271 IEM_MC_END();
3272 }
3273 else
3274 {
3275 IEM_MC_BEGIN(0, 1, 0);
3276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3279 /* Currently a NOP. */
3280 NOREF(GCPtrEffSrc);
3281 IEM_MC_ADVANCE_RIP_AND_FINISH();
3282 IEM_MC_END();
3283 }
3284}
3285
3286
3287/** Opcode 0x0f 0x20. */
3288FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3289{
3290 /* mod is ignored, as are operand-size overrides. */
3291 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3292 IEMOP_HLP_MIN_386();
3293 if (IEM_IS_64BIT_CODE(pVCpu))
3294 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3295 else
3296 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3297
3298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3299 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3300 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3301 {
3302 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3303 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3304 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3305 iCrReg |= 8;
3306 }
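    /* Worked example: with the lock prefix accepted, "lock mov eax, cr0"
       (f0 0f 20 c0) yields iCrReg = 0 | 8 and thus reads CR8. */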
3307 switch (iCrReg)
3308 {
3309 case 0: case 2: case 3: case 4: case 8:
3310 break;
3311 default:
3312 IEMOP_RAISE_INVALID_OPCODE_RET();
3313 }
3314 IEMOP_HLP_DONE_DECODING();
3315
3316 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3317}
3318
3319
3320/** Opcode 0x0f 0x21. */
3321FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3322{
3323 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3324 IEMOP_HLP_MIN_386();
3325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3327 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3328 IEMOP_RAISE_INVALID_OPCODE_RET();
3329 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3330}
3331
3332
3333/** Opcode 0x0f 0x22. */
3334FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3335{
3336 /* mod is ignored, as are operand-size overrides. */
3337 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3338 IEMOP_HLP_MIN_386();
3339 if (IEM_IS_64BIT_CODE(pVCpu))
3340 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3341 else
3342 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3343
3344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3345 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3346 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3347 {
3348 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3349 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3350 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3351 iCrReg |= 8;
3352 }
3353 switch (iCrReg)
3354 {
3355 case 0: case 2: case 3: case 4: case 8:
3356 break;
3357 default:
3358 IEMOP_RAISE_INVALID_OPCODE_RET();
3359 }
3360 IEMOP_HLP_DONE_DECODING();
3361
3362 if (iCrReg & (2 | 8))
3363 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3364 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3365 else
3366 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3367 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3368}
3369
3370
3371/** Opcode 0x0f 0x23. */
3372FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3373{
3374 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3375 IEMOP_HLP_MIN_386();
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3378 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3379 IEMOP_RAISE_INVALID_OPCODE_RET();
3380 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3381}
3382
3383
3384/** Opcode 0x0f 0x24. */
3385FNIEMOP_DEF(iemOp_mov_Rd_Td)
3386{
3387 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3388 IEMOP_HLP_MIN_386();
3389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
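    /* The test registers (TR6/TR7 on the 386, TR3..TR7 on the 486) were
       dropped with the Pentium, hence the #UD for newer target CPUs below. */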
3391 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3392 IEMOP_RAISE_INVALID_OPCODE_RET();
3393 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3394}
3395
3396
3397/** Opcode 0x0f 0x26. */
3398FNIEMOP_DEF(iemOp_mov_Td_Rd)
3399{
3400 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3401 IEMOP_HLP_MIN_386();
3402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3404 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3405 IEMOP_RAISE_INVALID_OPCODE_RET();
3406 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3407}
3408
3409
3410/**
3411 * @opcode 0x28
3412 * @oppfx none
3413 * @opcpuid sse
3414 * @opgroup og_sse_simdfp_datamove
3415 * @opxcpttype 1
3416 * @optest op1=1 op2=2 -> op1=2
3417 * @optest op1=0 op2=-42 -> op1=-42
3418 */
3419FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3420{
3421 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3423 if (IEM_IS_MODRM_REG_MODE(bRm))
3424 {
3425 /*
3426 * Register, register.
3427 */
3428 IEM_MC_BEGIN(0, 0, 0);
3429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3430 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3431 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3432 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3433 IEM_GET_MODRM_RM(pVCpu, bRm));
3434 IEM_MC_ADVANCE_RIP_AND_FINISH();
3435 IEM_MC_END();
3436 }
3437 else
3438 {
3439 /*
3440 * Register, memory.
3441 */
3442 IEM_MC_BEGIN(0, 2, 0);
3443 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3445
3446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3450
3451 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3452 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3453
3454 IEM_MC_ADVANCE_RIP_AND_FINISH();
3455 IEM_MC_END();
3456 }
3457}
3458
3459/**
3460 * @opcode 0x28
3461 * @oppfx 66
3462 * @opcpuid sse2
3463 * @opgroup og_sse2_pcksclr_datamove
3464 * @opxcpttype 1
3465 * @optest op1=1 op2=2 -> op1=2
3466 * @optest op1=0 op2=-42 -> op1=-42
3467 */
3468FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3469{
3470 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3472 if (IEM_IS_MODRM_REG_MODE(bRm))
3473 {
3474 /*
3475 * Register, register.
3476 */
3477 IEM_MC_BEGIN(0, 0, 0);
3478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3481 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3482 IEM_GET_MODRM_RM(pVCpu, bRm));
3483 IEM_MC_ADVANCE_RIP_AND_FINISH();
3484 IEM_MC_END();
3485 }
3486 else
3487 {
3488 /*
3489 * Register, memory.
3490 */
3491 IEM_MC_BEGIN(0, 2, 0);
3492 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3494
3495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3499
3500 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3501 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3502
3503 IEM_MC_ADVANCE_RIP_AND_FINISH();
3504 IEM_MC_END();
3505 }
3506}
3507
3508/* Opcode 0xf3 0x0f 0x28 - invalid */
3509/* Opcode 0xf2 0x0f 0x28 - invalid */
3510
3511/**
3512 * @opcode 0x29
3513 * @oppfx none
3514 * @opcpuid sse
3515 * @opgroup og_sse_simdfp_datamove
3516 * @opxcpttype 1
3517 * @optest op1=1 op2=2 -> op1=2
3518 * @optest op1=0 op2=-42 -> op1=-42
3519 */
3520FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3521{
3522 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3524 if (IEM_IS_MODRM_REG_MODE(bRm))
3525 {
3526 /*
3527 * Register, register.
3528 */
3529 IEM_MC_BEGIN(0, 0, 0);
3530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3531 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3532 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3533 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3534 IEM_GET_MODRM_REG(pVCpu, bRm));
3535 IEM_MC_ADVANCE_RIP_AND_FINISH();
3536 IEM_MC_END();
3537 }
3538 else
3539 {
3540 /*
3541 * Memory, register.
3542 */
3543 IEM_MC_BEGIN(0, 2, 0);
3544 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3546
3547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3549 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3550 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3551
3552 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3553 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3554
3555 IEM_MC_ADVANCE_RIP_AND_FINISH();
3556 IEM_MC_END();
3557 }
3558}
3559
3560/**
3561 * @opcode 0x29
3562 * @oppfx 66
3563 * @opcpuid sse2
3564 * @opgroup og_sse2_pcksclr_datamove
3565 * @opxcpttype 1
3566 * @optest op1=1 op2=2 -> op1=2
3567 * @optest op1=0 op2=-42 -> op1=-42
3568 */
3569FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3570{
3571 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3573 if (IEM_IS_MODRM_REG_MODE(bRm))
3574 {
3575 /*
3576 * Register, register.
3577 */
3578 IEM_MC_BEGIN(0, 0, 0);
3579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3580 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3581 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3582 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3583 IEM_GET_MODRM_REG(pVCpu, bRm));
3584 IEM_MC_ADVANCE_RIP_AND_FINISH();
3585 IEM_MC_END();
3586 }
3587 else
3588 {
3589 /*
3590 * Memory, register.
3591 */
3592 IEM_MC_BEGIN(0, 2, 0);
3593 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3595
3596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3600
3601 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3602 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3603
3604 IEM_MC_ADVANCE_RIP_AND_FINISH();
3605 IEM_MC_END();
3606 }
3607}
3608
3609/* Opcode 0xf3 0x0f 0x29 - invalid */
3610/* Opcode 0xf2 0x0f 0x29 - invalid */
3611
3612
3613/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3614FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3615{
3616 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3618 if (IEM_IS_MODRM_REG_MODE(bRm))
3619 {
3620 /*
3621 * XMM, MMX
3622 */
3623 IEM_MC_BEGIN(3, 1, 0);
3624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3625 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3626 IEM_MC_LOCAL(X86XMMREG, Dst);
3627 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3628 IEM_MC_ARG(uint64_t, u64Src, 2);
3629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3630 IEM_MC_MAYBE_RAISE_FPU_XCPT();
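/* Accessing the MMX source register switches the x87 unit to MMX mode
   (all tags valid, TOS cleared), hence the explicit transition below. */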
3631 IEM_MC_PREPARE_FPU_USAGE();
3632 IEM_MC_FPU_TO_MMX_MODE();
3633
3634 IEM_MC_REF_MXCSR(pfMxcsr);
3635 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3636 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3637
3638 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
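/* Standard SSE pattern: commit the result only if the helper left no
   unmasked exception pending in MXCSR; otherwise raise #XM, or #UD if
   CR4.OSXMMEXCPT is clear. */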
3639 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3640 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3641 } IEM_MC_ELSE() {
3642 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3643 } IEM_MC_ENDIF();
3644
3645 IEM_MC_ADVANCE_RIP_AND_FINISH();
3646 IEM_MC_END();
3647 }
3648 else
3649 {
3650 /*
3651 * XMM, [mem64]
3652 */
3653 IEM_MC_BEGIN(3, 2, 0);
3654 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3655 IEM_MC_LOCAL(X86XMMREG, Dst);
3656 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3657 IEM_MC_ARG(uint64_t, u64Src, 2);
3658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3659
3660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3662 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3663 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3664 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3665
3666 IEM_MC_PREPARE_FPU_USAGE();
3667 IEM_MC_FPU_TO_MMX_MODE();
3668 IEM_MC_REF_MXCSR(pfMxcsr);
IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3669
3670 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3671 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3672 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3673 } IEM_MC_ELSE() {
3674 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3675 } IEM_MC_ENDIF();
3676
3677 IEM_MC_ADVANCE_RIP_AND_FINISH();
3678 IEM_MC_END();
3679 }
3680}
3681
3682
3683/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3684FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3685{
3686 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3688 if (IEM_IS_MODRM_REG_MODE(bRm))
3689 {
3690 /*
3691 * XMM, MMX
3692 */
3693 IEM_MC_BEGIN(3, 1, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3695 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3696 IEM_MC_LOCAL(X86XMMREG, Dst);
3697 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3698 IEM_MC_ARG(uint64_t, u64Src, 2);
3699 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3700 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3701 IEM_MC_PREPARE_FPU_USAGE();
3702 IEM_MC_FPU_TO_MMX_MODE();
3703
3704 IEM_MC_REF_MXCSR(pfMxcsr);
3705 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3706
3707 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3708 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3709 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3710 } IEM_MC_ELSE() {
3711 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3712 } IEM_MC_ENDIF();
3713
3714 IEM_MC_ADVANCE_RIP_AND_FINISH();
3715 IEM_MC_END();
3716 }
3717 else
3718 {
3719 /*
3720 * XMM, [mem64]
3721 */
3722 IEM_MC_BEGIN(3, 3, 0);
3723 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3724 IEM_MC_LOCAL(X86XMMREG, Dst);
3725 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3726 IEM_MC_ARG(uint64_t, u64Src, 2);
3727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3728
3729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3731 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3732 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3733 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3734
3735 /* Doesn't cause a transition to MMX mode. */
3736 IEM_MC_PREPARE_SSE_USAGE();
3737 IEM_MC_REF_MXCSR(pfMxcsr);
3738
3739 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3740 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3741 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3742 } IEM_MC_ELSE() {
3743 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3744 } IEM_MC_ENDIF();
3745
3746 IEM_MC_ADVANCE_RIP_AND_FINISH();
3747 IEM_MC_END();
3748 }
3749}
3750
3751
3752/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3753FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3754{
3755 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3756
3757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
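/* REX.W selects a 64-bit integer source operand (Ey); otherwise it is 32 bits. */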
3758 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3759 {
3760 if (IEM_IS_MODRM_REG_MODE(bRm))
3761 {
3762 /* XMM, greg64 */
3763 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
3764 IEM_MC_LOCAL(uint32_t, fMxcsr);
3765 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3766 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3767 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3768 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3769
3770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3771 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3772 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3773
3774 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3775 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3776 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3777 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3778 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3779 } IEM_MC_ELSE() {
3780 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3781 } IEM_MC_ENDIF();
3782
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 IEM_MC_END();
3785 }
3786 else
3787 {
3788 /* XMM, [mem64] */
3789 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
3790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3791 IEM_MC_LOCAL(uint32_t, fMxcsr);
3792 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3793 IEM_MC_LOCAL(int64_t, i64Src);
3794 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3795 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3796 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3797
3798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3800 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3801 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3802
3803 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3804 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3805 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3806 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3807 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3808 } IEM_MC_ELSE() {
3809 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3810 } IEM_MC_ENDIF();
3811
3812 IEM_MC_ADVANCE_RIP_AND_FINISH();
3813 IEM_MC_END();
3814 }
3815 }
3816 else
3817 {
3818 if (IEM_IS_MODRM_REG_MODE(bRm))
3819 {
3820 /* XMM, greg32 */
3821 IEM_MC_BEGIN(3, 2, 0);
3822 IEM_MC_LOCAL(uint32_t, fMxcsr);
3823 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3824 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3825 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3826 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3827
3828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3829 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3830 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3831
3832 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3833 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3834 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3835 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3836 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3837 } IEM_MC_ELSE() {
3838 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3839 } IEM_MC_ENDIF();
3840
3841 IEM_MC_ADVANCE_RIP_AND_FINISH();
3842 IEM_MC_END();
3843 }
3844 else
3845 {
3846 /* XMM, [mem32] */
3847 IEM_MC_BEGIN(3, 4, 0);
3848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3849 IEM_MC_LOCAL(uint32_t, fMxcsr);
3850 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3851 IEM_MC_LOCAL(int32_t, i32Src);
3852 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3853 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3854 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3855
3856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3858 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3859 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3860
3861 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3862 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3863 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3864 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3865 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3866 } IEM_MC_ELSE() {
3867 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3868 } IEM_MC_ENDIF();
3869
3870 IEM_MC_ADVANCE_RIP_AND_FINISH();
3871 IEM_MC_END();
3872 }
3873 }
3874}
3875
3876
3877/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3878FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3879{
3880 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3881
3882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3883 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3884 {
3885 if (IEM_IS_MODRM_REG_MODE(bRm))
3886 {
3887 /* XMM, greg64 */
3888 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
3889 IEM_MC_LOCAL(uint32_t, fMxcsr);
3890 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3891 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3892 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3893 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3894
3895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3896 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3897 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3898
3899 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3900 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3901 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3902 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3903 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3904 } IEM_MC_ELSE() {
3905 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3906 } IEM_MC_ENDIF();
3907
3908 IEM_MC_ADVANCE_RIP_AND_FINISH();
3909 IEM_MC_END();
3910 }
3911 else
3912 {
3913 /* XMM, [mem64] */
3914 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
3915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3916 IEM_MC_LOCAL(uint32_t, fMxcsr);
3917 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3918 IEM_MC_LOCAL(int64_t, i64Src);
3919 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3920 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3921 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3922
3923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3925 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3926 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3927
3928 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3929 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3930 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3931 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3932 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3933 } IEM_MC_ELSE() {
3934 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3935 } IEM_MC_ENDIF();
3936
3937 IEM_MC_ADVANCE_RIP_AND_FINISH();
3938 IEM_MC_END();
3939 }
3940 }
3941 else
3942 {
3943 if (IEM_IS_MODRM_REG_MODE(bRm))
3944 {
3945 /* XMM, greg32 */
3946 IEM_MC_BEGIN(3, 2, 0);
3947 IEM_MC_LOCAL(uint32_t, fMxcsr);
3948 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3949 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3950 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3951 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3952
3953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3955 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3956
3957 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3958 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3959 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3960 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3961 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3962 } IEM_MC_ELSE() {
3963 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3964 } IEM_MC_ENDIF();
3965
3966 IEM_MC_ADVANCE_RIP_AND_FINISH();
3967 IEM_MC_END();
3968 }
3969 else
3970 {
3971 /* XMM, [mem32] */
3972 IEM_MC_BEGIN(3, 4, 0);
3973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3974 IEM_MC_LOCAL(uint32_t, fMxcsr);
3975 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3976 IEM_MC_LOCAL(int32_t, i32Src);
3977 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3978 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3979 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3980
3981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3983 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3984 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3985
3986 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3987 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3988 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3989 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3990 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3991 } IEM_MC_ELSE() {
3992 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3993 } IEM_MC_ENDIF();
3994
3995 IEM_MC_ADVANCE_RIP_AND_FINISH();
3996 IEM_MC_END();
3997 }
3998 }
3999}
4000
4001
4002/**
4003 * @opcode 0x2b
4004 * @opcodesub !11 mr/reg
4005 * @oppfx none
4006 * @opcpuid sse
4007 * @opgroup og_sse1_cachect
4008 * @opxcpttype 1
4009 * @optest op1=1 op2=2 -> op1=2
4010 * @optest op1=0 op2=-42 -> op1=-42
4011 */
4012FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4013{
4014 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4016 if (IEM_IS_MODRM_MEM_MODE(bRm))
4017 {
4018 /*
4019 * Memory, register.
4020 */
4021 IEM_MC_BEGIN(0, 2, 0);
4022 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4024
4025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4027 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4028 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4029
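/* The non-temporal hint is not modelled here; this is performed as a
   plain 16-byte aligned store. */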
4030 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4031 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4032
4033 IEM_MC_ADVANCE_RIP_AND_FINISH();
4034 IEM_MC_END();
4035 }
4036 /* The register, register encoding is invalid. */
4037 else
4038 IEMOP_RAISE_INVALID_OPCODE_RET();
4039}
4040
4041/**
4042 * @opcode 0x2b
4043 * @opcodesub !11 mr/reg
4044 * @oppfx 0x66
4045 * @opcpuid sse2
4046 * @opgroup og_sse2_cachect
4047 * @opxcpttype 1
4048 * @optest op1=1 op2=2 -> op1=2
4049 * @optest op1=0 op2=-42 -> op1=-42
4050 */
4051FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4052{
4053 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4055 if (IEM_IS_MODRM_MEM_MODE(bRm))
4056 {
4057 /*
4058 * Memory, register.
4059 */
4060 IEM_MC_BEGIN(0, 2, 0);
4061 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4063
4064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4068
4069 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4070 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4071
4072 IEM_MC_ADVANCE_RIP_AND_FINISH();
4073 IEM_MC_END();
4074 }
4075 /* The register, register encoding is invalid. */
4076 else
4077 IEMOP_RAISE_INVALID_OPCODE_RET();
4078}
4079/* Opcode 0xf3 0x0f 0x2b - invalid */
4080/* Opcode 0xf2 0x0f 0x2b - invalid */
4081
4082
4083/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4084FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4085{
4086 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
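/* The 'tt' (truncating) form always rounds toward zero, ignoring MXCSR.RC. */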
4087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4088 if (IEM_IS_MODRM_REG_MODE(bRm))
4089 {
4090 /*
4091 * Register, register.
4092 */
4093 IEM_MC_BEGIN(3, 1, 0);
4094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4095 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4096 IEM_MC_LOCAL(uint64_t, u64Dst);
4097 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4098 IEM_MC_ARG(uint64_t, u64Src, 2);
4099 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4100 IEM_MC_PREPARE_FPU_USAGE();
4101 IEM_MC_FPU_TO_MMX_MODE();
4102
4103 IEM_MC_REF_MXCSR(pfMxcsr);
4104 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4105
4106 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4107 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4108 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4109 } IEM_MC_ELSE() {
4110 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4111 } IEM_MC_ENDIF();
4112
4113 IEM_MC_ADVANCE_RIP_AND_FINISH();
4114 IEM_MC_END();
4115 }
4116 else
4117 {
4118 /*
4119 * Register, memory.
4120 */
4121 IEM_MC_BEGIN(3, 2, 0);
4122 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4123 IEM_MC_LOCAL(uint64_t, u64Dst);
4124 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4125 IEM_MC_ARG(uint64_t, u64Src, 2);
4126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4127
4128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4131 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4132
4133 IEM_MC_PREPARE_FPU_USAGE();
4134 IEM_MC_FPU_TO_MMX_MODE();
4135 IEM_MC_REF_MXCSR(pfMxcsr);
4136
4137 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4138 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4139 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4140 } IEM_MC_ELSE() {
4141 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4142 } IEM_MC_ENDIF();
4143
4144 IEM_MC_ADVANCE_RIP_AND_FINISH();
4145 IEM_MC_END();
4146 }
4147}
4148
4149
4150/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4151FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4152{
4153 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4155 if (IEM_IS_MODRM_REG_MODE(bRm))
4156 {
4157 /*
4158 * Register, register.
4159 */
4160 IEM_MC_BEGIN(3, 1, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4162 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4163 IEM_MC_LOCAL(uint64_t, u64Dst);
4164 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4165 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4166 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4167 IEM_MC_PREPARE_FPU_USAGE();
4168 IEM_MC_FPU_TO_MMX_MODE();
4169
4170 IEM_MC_REF_MXCSR(pfMxcsr);
4171 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4172
4173 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4174 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4175 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4176 } IEM_MC_ELSE() {
4177 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4178 } IEM_MC_ENDIF();
4179
4180 IEM_MC_ADVANCE_RIP_AND_FINISH();
4181 IEM_MC_END();
4182 }
4183 else
4184 {
4185 /*
4186 * Register, memory.
4187 */
4188 IEM_MC_BEGIN(3, 3, 0);
4189 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4190 IEM_MC_LOCAL(uint64_t, u64Dst);
4191 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4192 IEM_MC_LOCAL(X86XMMREG, uSrc);
4193 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4195
4196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4198 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4199 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4200
4201 IEM_MC_PREPARE_FPU_USAGE();
4202 IEM_MC_FPU_TO_MMX_MODE();
4203
4204 IEM_MC_REF_MXCSR(pfMxcsr);
4205
4206 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4207 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4208 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4209 } IEM_MC_ELSE() {
4210 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4211 } IEM_MC_ENDIF();
4212
4213 IEM_MC_ADVANCE_RIP_AND_FINISH();
4214 IEM_MC_END();
4215 }
4216}
4217
4218
4219/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4220FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4221{
4222 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4223
4224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4225 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4226 {
4227 if (IEM_IS_MODRM_REG_MODE(bRm))
4228 {
4229 /* greg64, XMM */
4230 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4231 IEM_MC_LOCAL(uint32_t, fMxcsr);
4232 IEM_MC_LOCAL(int64_t, i64Dst);
4233 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4234 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4235 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4236
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4238 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4239 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4240
4241 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4242 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4243 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4244 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4245 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4246 } IEM_MC_ELSE() {
4247 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4248 } IEM_MC_ENDIF();
4249
4250 IEM_MC_ADVANCE_RIP_AND_FINISH();
4251 IEM_MC_END();
4252 }
4253 else
4254 {
4255 /* greg64, [mem32] */
4256 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4258 IEM_MC_LOCAL(uint32_t, fMxcsr);
4259 IEM_MC_LOCAL(int64_t, i64Dst);
4260 IEM_MC_LOCAL(uint32_t, u32Src);
4261 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4262 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4263 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4264
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4267 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4268 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4269
4270 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4271 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4272 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4273 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4274 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4277 } IEM_MC_ENDIF();
4278
4279 IEM_MC_ADVANCE_RIP_AND_FINISH();
4280 IEM_MC_END();
4281 }
4282 }
4283 else
4284 {
4285 if (IEM_IS_MODRM_REG_MODE(bRm))
4286 {
4287 /* greg32, XMM */
4288 IEM_MC_BEGIN(3, 2, 0);
4289 IEM_MC_LOCAL(uint32_t, fMxcsr);
4290 IEM_MC_LOCAL(int32_t, i32Dst);
4291 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4292 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4293 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4294
4295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4297 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4298
4299 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4300 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4301 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4302 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4303 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4304 } IEM_MC_ELSE() {
4305 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4306 } IEM_MC_ENDIF();
4307
4308 IEM_MC_ADVANCE_RIP_AND_FINISH();
4309 IEM_MC_END();
4310 }
4311 else
4312 {
4313 /* greg32, [mem32] */
4314 IEM_MC_BEGIN(3, 4, 0);
4315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4316 IEM_MC_LOCAL(uint32_t, fMxcsr);
4317 IEM_MC_LOCAL(int32_t, i32Dst);
4318 IEM_MC_LOCAL(uint32_t, u32Src);
4319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4320 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4321 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4322
4323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4325 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4326 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4327
4328 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4329 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4330 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4331 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4332 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4333 } IEM_MC_ELSE() {
4334 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4335 } IEM_MC_ENDIF();
4336
4337 IEM_MC_ADVANCE_RIP_AND_FINISH();
4338 IEM_MC_END();
4339 }
4340 }
4341}
4342
4343
4344/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4345FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4346{
4347 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4348
4349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4350 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4351 {
4352 if (IEM_IS_MODRM_REG_MODE(bRm))
4353 {
4354 /* greg64, XMM */
4355 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4356 IEM_MC_LOCAL(uint32_t, fMxcsr);
4357 IEM_MC_LOCAL(int64_t, i64Dst);
4358 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4359 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4360 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4361
4362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4364 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4365
4366 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4367 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4368 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4369 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4370 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4371 } IEM_MC_ELSE() {
4372 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4373 } IEM_MC_ENDIF();
4374
4375 IEM_MC_ADVANCE_RIP_AND_FINISH();
4376 IEM_MC_END();
4377 }
4378 else
4379 {
4380 /* greg64, [mem64] */
4381 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4383 IEM_MC_LOCAL(uint32_t, fMxcsr);
4384 IEM_MC_LOCAL(int64_t, i64Dst);
4385 IEM_MC_LOCAL(uint64_t, u64Src);
4386 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4387 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4388 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4389
4390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4392 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4393 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4394
4395 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4396 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4397 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4398 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4399 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4400 } IEM_MC_ELSE() {
4401 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4402 } IEM_MC_ENDIF();
4403
4404 IEM_MC_ADVANCE_RIP_AND_FINISH();
4405 IEM_MC_END();
4406 }
4407 }
4408 else
4409 {
4410 if (IEM_IS_MODRM_REG_MODE(bRm))
4411 {
4412 /* greg32, XMM */
4413 IEM_MC_BEGIN(3, 2, 0);
4414 IEM_MC_LOCAL(uint32_t, fMxcsr);
4415 IEM_MC_LOCAL(int32_t, i32Dst);
4416 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4417 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4418 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4419
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4421 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4422 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4423
4424 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4425 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4426 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4427 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4428 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4429 } IEM_MC_ELSE() {
4430 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4431 } IEM_MC_ENDIF();
4432
4433 IEM_MC_ADVANCE_RIP_AND_FINISH();
4434 IEM_MC_END();
4435 }
4436 else
4437 {
4438 /* greg32, [mem64] */
4439 IEM_MC_BEGIN(3, 4, 0);
4440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4441 IEM_MC_LOCAL(uint32_t, fMxcsr);
4442 IEM_MC_LOCAL(int32_t, i32Dst);
4443 IEM_MC_LOCAL(uint64_t, u64Src);
4444 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4445 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4446 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4447
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4451 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4452
4453 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4454 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4455 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4456 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4457 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4460 } IEM_MC_ENDIF();
4461
4462 IEM_MC_ADVANCE_RIP_AND_FINISH();
4463 IEM_MC_END();
4464 }
4465 }
4466}
4467
4468
4469/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4470FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4471{
4472 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
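/* Unlike the truncating cvttps2pi form, this converts using the rounding mode in MXCSR.RC. */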
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474 if (IEM_IS_MODRM_REG_MODE(bRm))
4475 {
4476 /*
4477 * Register, register.
4478 */
4479 IEM_MC_BEGIN(3, 1, 0);
4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4481 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4482 IEM_MC_LOCAL(uint64_t, u64Dst);
4483 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4484 IEM_MC_ARG(uint64_t, u64Src, 2);
4485
4486 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4487 IEM_MC_PREPARE_FPU_USAGE();
4488 IEM_MC_FPU_TO_MMX_MODE();
4489
4490 IEM_MC_REF_MXCSR(pfMxcsr);
4491 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4492
4493 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4494 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4495 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4496 } IEM_MC_ELSE() {
4497 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4498 } IEM_MC_ENDIF();
4499
4500 IEM_MC_ADVANCE_RIP_AND_FINISH();
4501 IEM_MC_END();
4502 }
4503 else
4504 {
4505 /*
4506 * Register, memory.
4507 */
4508 IEM_MC_BEGIN(3, 2, 0);
4509 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4510 IEM_MC_LOCAL(uint64_t, u64Dst);
4511 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4512 IEM_MC_ARG(uint64_t, u64Src, 2);
4513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4514
4515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4518 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4519
4520 IEM_MC_PREPARE_FPU_USAGE();
4521 IEM_MC_FPU_TO_MMX_MODE();
4522 IEM_MC_REF_MXCSR(pfMxcsr);
4523
4524 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4525 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4526 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4529 } IEM_MC_ENDIF();
4530
4531 IEM_MC_ADVANCE_RIP_AND_FINISH();
4532 IEM_MC_END();
4533 }
4534}
4535
4536
4537 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4538FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4539{
4540 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4542 if (IEM_IS_MODRM_REG_MODE(bRm))
4543 {
4544 /*
4545 * Register, register.
4546 */
4547 IEM_MC_BEGIN(3, 1, 0);
4548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4549 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4550 IEM_MC_LOCAL(uint64_t, u64Dst);
4551 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4552 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4553
4554 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4555 IEM_MC_PREPARE_FPU_USAGE();
4556 IEM_MC_FPU_TO_MMX_MODE();
4557
4558 IEM_MC_REF_MXCSR(pfMxcsr);
4559 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4560
4561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4562 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4563 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4564 } IEM_MC_ELSE() {
4565 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4566 } IEM_MC_ENDIF();
4567
4568 IEM_MC_ADVANCE_RIP_AND_FINISH();
4569 IEM_MC_END();
4570 }
4571 else
4572 {
4573 /*
4574 * Register, memory.
4575 */
4576 IEM_MC_BEGIN(3, 3, 0);
4577 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4578 IEM_MC_LOCAL(uint64_t, u64Dst);
4579 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4580 IEM_MC_LOCAL(X86XMMREG, uSrc);
4581 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4583
4584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4586 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4587 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4588
4589 IEM_MC_PREPARE_FPU_USAGE();
4590 IEM_MC_FPU_TO_MMX_MODE();
4591
4592 IEM_MC_REF_MXCSR(pfMxcsr);
4593
4594 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4595 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4596 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4597 } IEM_MC_ELSE() {
4598 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4599 } IEM_MC_ENDIF();
4600
4601 IEM_MC_ADVANCE_RIP_AND_FINISH();
4602 IEM_MC_END();
4603 }
4604}
4605
4606
4607/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4608FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4609{
4610 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4611
4612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4613 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4614 {
4615 if (IEM_IS_MODRM_REG_MODE(bRm))
4616 {
4617 /* greg64, XMM */
4618 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4619 IEM_MC_LOCAL(uint32_t, fMxcsr);
4620 IEM_MC_LOCAL(int64_t, i64Dst);
4621 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4622 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4623 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4624
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4626 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4627 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4628
4629 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4630 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4631 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4632 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4633 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4634 } IEM_MC_ELSE() {
4635 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4636 } IEM_MC_ENDIF();
4637
4638 IEM_MC_ADVANCE_RIP_AND_FINISH();
4639 IEM_MC_END();
4640 }
4641 else
4642 {
4643 /* greg64, [mem32] */
4644 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4646 IEM_MC_LOCAL(uint32_t, fMxcsr);
4647 IEM_MC_LOCAL(int64_t, i64Dst);
4648 IEM_MC_LOCAL(uint32_t, u32Src);
4649 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4650 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4651 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4652
4653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4656 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4657
4658 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4659 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4660 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4661 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4662 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4663 } IEM_MC_ELSE() {
4664 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4665 } IEM_MC_ENDIF();
4666
4667 IEM_MC_ADVANCE_RIP_AND_FINISH();
4668 IEM_MC_END();
4669 }
4670 }
4671 else
4672 {
4673 if (IEM_IS_MODRM_REG_MODE(bRm))
4674 {
4675 /* greg32, XMM */
4676 IEM_MC_BEGIN(3, 2, 0);
4677 IEM_MC_LOCAL(uint32_t, fMxcsr);
4678 IEM_MC_LOCAL(int32_t, i32Dst);
4679 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4680 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4681 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4682
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4684 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4685 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4686
4687 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4688 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4689 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4690 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4691 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4692 } IEM_MC_ELSE() {
4693 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4694 } IEM_MC_ENDIF();
4695
4696 IEM_MC_ADVANCE_RIP_AND_FINISH();
4697 IEM_MC_END();
4698 }
4699 else
4700 {
4701 /* greg32, [mem32] */
4702 IEM_MC_BEGIN(3, 4, 0);
4703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4704 IEM_MC_LOCAL(uint32_t, fMxcsr);
4705 IEM_MC_LOCAL(int32_t, i32Dst);
4706 IEM_MC_LOCAL(uint32_t, u32Src);
4707 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4708 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4709 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4710
4711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4713 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4714 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4715
4716 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4717 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4718 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4719 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4720 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4721 } IEM_MC_ELSE() {
4722 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4723 } IEM_MC_ENDIF();
4724
4725 IEM_MC_ADVANCE_RIP_AND_FINISH();
4726 IEM_MC_END();
4727 }
4728 }
4729}
4730
4731
4732/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4733FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4734{
4735 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4736
4737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4739 {
4740 if (IEM_IS_MODRM_REG_MODE(bRm))
4741 {
4742 /* greg64, XMM */
4743 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4744 IEM_MC_LOCAL(uint32_t, fMxcsr);
4745 IEM_MC_LOCAL(int64_t, i64Dst);
4746 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4747 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4748 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4749
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4751 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4752 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4753
4754 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4755 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4756 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4757 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4758 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4759 } IEM_MC_ELSE() {
4760 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4761 } IEM_MC_ENDIF();
4762
4763 IEM_MC_ADVANCE_RIP_AND_FINISH();
4764 IEM_MC_END();
4765 }
4766 else
4767 {
4768 /* greg64, [mem64] */
4769 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4771 IEM_MC_LOCAL(uint32_t, fMxcsr);
4772 IEM_MC_LOCAL(int64_t, i64Dst);
4773 IEM_MC_LOCAL(uint64_t, u64Src);
4774 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4775 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4776 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4777
4778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4781 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4782
4783 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4784 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4785 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4786 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4787 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4788 } IEM_MC_ELSE() {
4789 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4790 } IEM_MC_ENDIF();
4791
4792 IEM_MC_ADVANCE_RIP_AND_FINISH();
4793 IEM_MC_END();
4794 }
4795 }
4796 else
4797 {
4798 if (IEM_IS_MODRM_REG_MODE(bRm))
4799 {
4800 /* greg32, XMM */
4801 IEM_MC_BEGIN(3, 2, 0);
4802 IEM_MC_LOCAL(uint32_t, fMxcsr);
4803 IEM_MC_LOCAL(int32_t, i32Dst);
4804 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4805 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4806 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4807
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4809 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4810 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4811
4812 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4813 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4814 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4815 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4816 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4817 } IEM_MC_ELSE() {
4818 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4819 } IEM_MC_ENDIF();
4820
4821 IEM_MC_ADVANCE_RIP_AND_FINISH();
4822 IEM_MC_END();
4823 }
4824 else
4825 {
4826 /* greg32, [mem64] */
4827 IEM_MC_BEGIN(3, 4, 0);
4828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4829 IEM_MC_LOCAL(uint32_t, fMxcsr);
4830 IEM_MC_LOCAL(int32_t, i32Dst);
4831 IEM_MC_LOCAL(uint64_t, u64Src);
4832 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4833 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4834 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4835
4836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4838 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4839 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4840
4841 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4843 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4844 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4845 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4846 } IEM_MC_ELSE() {
4847 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4848 } IEM_MC_ENDIF();
4849
4850 IEM_MC_ADVANCE_RIP_AND_FINISH();
4851 IEM_MC_END();
4852 }
4853 }
4854}
4855
4856
4857/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4858FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4859{
4860 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
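/* Unordered compare: ZF/PF/CF receive the result, OF/SF/AF are cleared,
   and #I is only signalled for SNaN operands (QNaNs just yield 'unordered'). */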
4861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4862 if (IEM_IS_MODRM_REG_MODE(bRm))
4863 {
4864 /*
4865 * Register, register.
4866 */
4867 IEM_MC_BEGIN(4, 1, 0);
4868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4869 IEM_MC_LOCAL(uint32_t, fEFlags);
4870 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4871 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4872 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4873 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4874 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4875 IEM_MC_PREPARE_SSE_USAGE();
4876 IEM_MC_FETCH_EFLAGS(fEFlags);
4877 IEM_MC_REF_MXCSR(pfMxcsr);
4878 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4879 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4880 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4881 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4882 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4883 } IEM_MC_ELSE() {
4884 IEM_MC_COMMIT_EFLAGS(fEFlags);
4885 } IEM_MC_ENDIF();
4886
4887 IEM_MC_ADVANCE_RIP_AND_FINISH();
4888 IEM_MC_END();
4889 }
4890 else
4891 {
4892 /*
4893 * Register, memory.
4894 */
4895 IEM_MC_BEGIN(4, 3, 0);
4896 IEM_MC_LOCAL(uint32_t, fEFlags);
4897 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4898 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4899 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4900 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4901 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4903
4904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4906 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4907 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4908
4909 IEM_MC_PREPARE_SSE_USAGE();
4910 IEM_MC_FETCH_EFLAGS(fEFlags);
4911 IEM_MC_REF_MXCSR(pfMxcsr);
4912 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4913 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4914 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4915 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4916 } IEM_MC_ELSE() {
4917 IEM_MC_COMMIT_EFLAGS(fEFlags);
4918 } IEM_MC_ENDIF();
4919
4920 IEM_MC_ADVANCE_RIP_AND_FINISH();
4921 IEM_MC_END();
4922 }
4923}
4924
4925
4926/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4927FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4928{
4929 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4931 if (IEM_IS_MODRM_REG_MODE(bRm))
4932 {
4933 /*
4934 * Register, register.
4935 */
4936 IEM_MC_BEGIN(4, 1, 0);
4937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4938 IEM_MC_LOCAL(uint32_t, fEFlags);
4939 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4940 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4941 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4942 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4944 IEM_MC_PREPARE_SSE_USAGE();
4945 IEM_MC_FETCH_EFLAGS(fEFlags);
4946 IEM_MC_REF_MXCSR(pfMxcsr);
4947 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4948 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4949 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4950 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4952 } IEM_MC_ELSE() {
4953 IEM_MC_COMMIT_EFLAGS(fEFlags);
4954 } IEM_MC_ENDIF();
4955
4956 IEM_MC_ADVANCE_RIP_AND_FINISH();
4957 IEM_MC_END();
4958 }
4959 else
4960 {
4961 /*
4962 * Register, memory.
4963 */
4964 IEM_MC_BEGIN(4, 3, 0);
4965 IEM_MC_LOCAL(uint32_t, fEFlags);
4966 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4967 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4968 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4969 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4970 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4972
4973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4975 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4976 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4977
4978 IEM_MC_PREPARE_SSE_USAGE();
4979 IEM_MC_FETCH_EFLAGS(fEFlags);
4980 IEM_MC_REF_MXCSR(pfMxcsr);
4981 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4982 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4983 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4984 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4985 } IEM_MC_ELSE() {
4986 IEM_MC_COMMIT_EFLAGS(fEFlags);
4987 } IEM_MC_ENDIF();
4988
4989 IEM_MC_ADVANCE_RIP_AND_FINISH();
4990 IEM_MC_END();
4991 }
4992}
4993
4994
4995/* Opcode 0xf3 0x0f 0x2e - invalid */
4996/* Opcode 0xf2 0x0f 0x2e - invalid */
4997
4998
4999/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5000FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5001{
5002 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
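/* Identical to ucomiss except that #I is signalled for QNaN operands too. */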
5003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5004 if (IEM_IS_MODRM_REG_MODE(bRm))
5005 {
5006 /*
5007 * Register, register.
5008 */
5009 IEM_MC_BEGIN(4, 1, 0);
5010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5011 IEM_MC_LOCAL(uint32_t, fEFlags);
5012 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5013 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5014 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5015 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5016 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5017 IEM_MC_PREPARE_SSE_USAGE();
5018 IEM_MC_FETCH_EFLAGS(fEFlags);
5019 IEM_MC_REF_MXCSR(pfMxcsr);
5020 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5021 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5023 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5024 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5025 } IEM_MC_ELSE() {
5026 IEM_MC_COMMIT_EFLAGS(fEFlags);
5027 } IEM_MC_ENDIF();
5028
5029 IEM_MC_ADVANCE_RIP_AND_FINISH();
5030 IEM_MC_END();
5031 }
5032 else
5033 {
5034 /*
5035 * Register, memory.
5036 */
5037 IEM_MC_BEGIN(4, 3, 0);
5038 IEM_MC_LOCAL(uint32_t, fEFlags);
5039 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5040 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5041 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5042 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5043 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5045
5046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5048 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5049 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5050
5051 IEM_MC_PREPARE_SSE_USAGE();
5052 IEM_MC_FETCH_EFLAGS(fEFlags);
5053 IEM_MC_REF_MXCSR(pfMxcsr);
5054 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5055 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5056 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5057 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5058 } IEM_MC_ELSE() {
5059 IEM_MC_COMMIT_EFLAGS(fEFlags);
5060 } IEM_MC_ENDIF();
5061
5062 IEM_MC_ADVANCE_RIP_AND_FINISH();
5063 IEM_MC_END();
5064 }
5065}
5066
5067
5068/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5069FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5070{
5071 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5073 if (IEM_IS_MODRM_REG_MODE(bRm))
5074 {
5075 /*
5076 * Register, register.
5077 */
5078 IEM_MC_BEGIN(4, 1, 0);
5079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5080 IEM_MC_LOCAL(uint32_t, fEFlags);
5081 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5082 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5083 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5084 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5085 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5086 IEM_MC_PREPARE_SSE_USAGE();
5087 IEM_MC_FETCH_EFLAGS(fEFlags);
5088 IEM_MC_REF_MXCSR(pfMxcsr);
5089 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5090 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5091 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5092 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5093 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5094 } IEM_MC_ELSE() {
5095 IEM_MC_COMMIT_EFLAGS(fEFlags);
5096 } IEM_MC_ENDIF();
5097
5098 IEM_MC_ADVANCE_RIP_AND_FINISH();
5099 IEM_MC_END();
5100 }
5101 else
5102 {
5103 /*
5104 * Register, memory.
5105 */
5106 IEM_MC_BEGIN(4, 3, 0);
5107 IEM_MC_LOCAL(uint32_t, fEFlags);
5108 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5109 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5110 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5111 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5112 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5114
5115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5118 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5119
5120 IEM_MC_PREPARE_SSE_USAGE();
5121 IEM_MC_FETCH_EFLAGS(fEFlags);
5122 IEM_MC_REF_MXCSR(pfMxcsr);
5123 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5124 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5125 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5126 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5127 } IEM_MC_ELSE() {
5128 IEM_MC_COMMIT_EFLAGS(fEFlags);
5129 } IEM_MC_ENDIF();
5130
5131 IEM_MC_ADVANCE_RIP_AND_FINISH();
5132 IEM_MC_END();
5133 }
5134}
5135
5136
5137/* Opcode 0xf3 0x0f 0x2f - invalid */
5138/* Opcode 0xf2 0x0f 0x2f - invalid */
5139
5140/** Opcode 0x0f 0x30. */
5141FNIEMOP_DEF(iemOp_wrmsr)
5142{
5143 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
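/* Too involved for inline microcode; defer to the C implementation, which
   may also have to raise a VM-exit for nested guests. */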
5145 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5146}
5147
5148
5149/** Opcode 0x0f 0x31. */
5150FNIEMOP_DEF(iemOp_rdtsc)
5151{
5152 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5155}
5156
5157
5158 /** Opcode 0x0f 0x32. */
5159FNIEMOP_DEF(iemOp_rdmsr)
5160{
5161 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5164}
5165
5166
5167 /** Opcode 0x0f 0x33. */
5168FNIEMOP_DEF(iemOp_rdpmc)
5169{
5170 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5173}
5174
5175
5176/** Opcode 0x0f 0x34. */
5177FNIEMOP_DEF(iemOp_sysenter)
5178{
5179 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5182 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5183 iemCImpl_sysenter);
5184}
5185
5186/** Opcode 0x0f 0x35. */
5187FNIEMOP_DEF(iemOp_sysexit)
5188{
5189 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5192 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5193 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5194}
5195
5196/** Opcode 0x0f 0x37. */
5197FNIEMOP_STUB(iemOp_getsec);
5198
5199
5200/** Opcode 0x0f 0x38. */
5201FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5202{
5203#ifdef IEM_WITH_THREE_0F_38
5204 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
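/* Four table entries per opcode byte, selected by the mandatory prefix
   (none, 0x66, 0xF3, 0xF2) recorded in idxPrefix. */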
5205 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5206#else
5207 IEMOP_BITCH_ABOUT_STUB();
5208 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5209#endif
5210}
5211
5212
5213/** Opcode 0x0f 0x3a. */
5214FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5215{
5216#ifdef IEM_WITH_THREE_0F_3A
5217 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5218 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5219#else
5220 IEMOP_BITCH_ABOUT_STUB();
5221 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5222#endif
5223}
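/* Both three-byte escape tables use four entries per opcode byte, indexed by
   pVCpu->iem.s.idxPrefix, i.e. the active none/0x66/0xf3/0xf2 mandatory
   prefix - the same layout as the group 12-14 tables further down. */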
5224
5225
5226/**
5227 * Implements a conditional move.
5228 *
5229 * Wish there was an obvious way to do this where we could share and reduce
5230 * code bloat.
5231 *
5232 * @param a_Cnd The conditional "microcode" operation.
5233 */
5234#define CMOV_X(a_Cnd) \
5235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5236 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5237 { \
5238 switch (pVCpu->iem.s.enmEffOpSize) \
5239 { \
5240 case IEMMODE_16BIT: \
5241 IEM_MC_BEGIN(0, 1, 0); \
5242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5243 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5244 a_Cnd { \
5245 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5246 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5247 } IEM_MC_ENDIF(); \
5248 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5249 IEM_MC_END(); \
5250 break; \
5251 \
5252 case IEMMODE_32BIT: \
5253 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386); \
5254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5255 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5256 a_Cnd { \
5257 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5258 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5259 } IEM_MC_ELSE() { \
5260 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5261 } IEM_MC_ENDIF(); \
5262 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5263 IEM_MC_END(); \
5264 break; \
5265 \
5266 case IEMMODE_64BIT: \
5267 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT); \
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5269 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5270 a_Cnd { \
5271 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5272 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5273 } IEM_MC_ENDIF(); \
5274 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5275 IEM_MC_END(); \
5276 break; \
5277 \
5278 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5279 } \
5280 } \
5281 else \
5282 { \
5283 switch (pVCpu->iem.s.enmEffOpSize) \
5284 { \
5285 case IEMMODE_16BIT: \
5286 IEM_MC_BEGIN(0, 2, 0); \
5287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5288 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5291 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5292 a_Cnd { \
5293 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5294 } IEM_MC_ENDIF(); \
5295 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5296 IEM_MC_END(); \
5297 break; \
5298 \
5299 case IEMMODE_32BIT: \
5300 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386); \
5301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5302 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5305 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5306 a_Cnd { \
5307 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5308 } IEM_MC_ELSE() { \
5309 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5310 } IEM_MC_ENDIF(); \
5311 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5312 IEM_MC_END(); \
5313 break; \
5314 \
5315 case IEMMODE_64BIT: \
5316 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT); \
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5318 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5321 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5322 a_Cnd { \
5323 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5324 } IEM_MC_ENDIF(); \
5325 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5326 IEM_MC_END(); \
5327 break; \
5328 \
5329 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5330 } \
5331 } do {} while (0)
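/* For reference, the EFLAGS predicates tested by the 0x0f 0x40..0x4f opcodes
   below (Intel SDM condition code encoding):
     0x40 OF=1    0x41 OF=0    0x42 CF=1         0x43 CF=0
     0x44 ZF=1    0x45 ZF=0    0x46 CF|ZF=1      0x47 CF|ZF=0
     0x48 SF=1    0x49 SF=0    0x4a PF=1         0x4b PF=0
     0x4c SF!=OF  0x4d SF==OF  0x4e ZF|(SF!=OF)  0x4f !ZF&(SF==OF)
   Note the IEM_MC_ELSE branches in the 32-bit cases above: a cmov with a
   false condition still zero-extends the destination in 64-bit mode. */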
5332
5333
5334
5335/** Opcode 0x0f 0x40. */
5336FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5337{
5338 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5339 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5340}
5341
5342
5343/** Opcode 0x0f 0x41. */
5344FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5345{
5346 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5347 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5348}
5349
5350
5351/** Opcode 0x0f 0x42. */
5352FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5353{
5354 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5355 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5356}
5357
5358
5359/** Opcode 0x0f 0x43. */
5360FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5361{
5362 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5363 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5364}
5365
5366
5367/** Opcode 0x0f 0x44. */
5368FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5369{
5370 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5371 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5372}
5373
5374
5375/** Opcode 0x0f 0x45. */
5376FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5377{
5378 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5379 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5380}
5381
5382
5383/** Opcode 0x0f 0x46. */
5384FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5385{
5386 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5387 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5388}
5389
5390
5391/** Opcode 0x0f 0x47. */
5392FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5393{
5394 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5395 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5396}
5397
5398
5399/** Opcode 0x0f 0x48. */
5400FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5401{
5402 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5403 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5404}
5405
5406
5407/** Opcode 0x0f 0x49. */
5408FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5409{
5410 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5411 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5412}
5413
5414
5415/** Opcode 0x0f 0x4a. */
5416FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5417{
5418 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5419 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5420}
5421
5422
5423/** Opcode 0x0f 0x4b. */
5424FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5425{
5426 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5427 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5428}
5429
5430
5431/** Opcode 0x0f 0x4c. */
5432FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5433{
5434 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5435 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5436}
5437
5438
5439/** Opcode 0x0f 0x4d. */
5440FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5441{
5442 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5443 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5444}
5445
5446
5447/** Opcode 0x0f 0x4e. */
5448FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5449{
5450 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5451 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5452}
5453
5454
5455/** Opcode 0x0f 0x4f. */
5456FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5457{
5458 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5459 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5460}
5461
5462#undef CMOV_X
5463
5464/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5465FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5466{
5467 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5469 if (IEM_IS_MODRM_REG_MODE(bRm))
5470 {
5471 /*
5472 * Register, register.
5473 */
5474 IEM_MC_BEGIN(2, 1, 0);
5475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5476 IEM_MC_LOCAL(uint8_t, u8Dst);
5477 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5478 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5480 IEM_MC_PREPARE_SSE_USAGE();
5481 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5482 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5484 IEM_MC_ADVANCE_RIP_AND_FINISH();
5485 IEM_MC_END();
5486 }
5487 /* No memory operand. */
5488 else
5489 IEMOP_RAISE_INVALID_OPCODE_RET();
5490}
5491
5492
5493/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5494FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5495{
5496 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5498 if (IEM_IS_MODRM_REG_MODE(bRm))
5499 {
5500 /*
5501 * Register, register.
5502 */
5503 IEM_MC_BEGIN(2, 1, 0);
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5505 IEM_MC_LOCAL(uint8_t, u8Dst);
5506 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5507 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5509 IEM_MC_PREPARE_SSE_USAGE();
5510 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5511 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5512 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5513 IEM_MC_ADVANCE_RIP_AND_FINISH();
5514 IEM_MC_END();
5515 }
5516 /* No memory operand. */
5517 else
5518 IEMOP_RAISE_INVALID_OPCODE_RET();
5519
5520}
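/* For reference: movmskps/movmskpd gather the sign bits of the four singles /
   two doubles into bits 3:0 / 1:0 of the destination GPR and zero the
   remaining bits, hence the uint8_t worker result being stored with a
   zero-extending U32 greg store above. */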
5521
5522
5523/* Opcode 0xf3 0x0f 0x50 - invalid */
5524/* Opcode 0xf2 0x0f 0x50 - invalid */
5525
5526
5527/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5528FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5529{
5530 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5531 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5532}
5533
5534
5535/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5536FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5537{
5538 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5539 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5540}
5541
5542
5543/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5544FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5545{
5546 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5547 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5548}
5549
5550
5551/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5552FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5553{
5554 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5555 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5556}
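/* A note on the worker naming used throughout the 0x51..0x5f range: the
   FullFull_To_Full workers operate on the entire 128-bit operands, while the
   FullR32/FullR64 workers implement the scalar ss/sd forms, combining only
   the low single/double and leaving the upper destination elements
   unchanged. */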
5557
5558
5559/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5560FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5561{
5562 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5563 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5564}
5565
5566
5567/* Opcode 0x66 0x0f 0x52 - invalid */
5568
5569
5570/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5571FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5572{
5573 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5574 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5575}
5576
5577
5578/* Opcode 0xf2 0x0f 0x52 - invalid */
5579
5580/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5581FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5582/* Opcode 0x66 0x0f 0x53 - invalid */
5583/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5584FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5585/* Opcode 0xf2 0x0f 0x53 - invalid */
5586
5587
5588/** Opcode 0x0f 0x54 - andps Vps, Wps */
5589FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5590{
5591 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5592 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5593}
5594
5595
5596/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5597FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5598{
5599 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5600 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5601}
5602
5603
5604/* Opcode 0xf3 0x0f 0x54 - invalid */
5605/* Opcode 0xf2 0x0f 0x54 - invalid */
5606
5607
5608/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5609FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5610{
5611 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5612 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5613}
5614
5615
5616/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5617FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5618{
5619 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5620 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5621}
5622
5623
5624/* Opcode 0xf3 0x0f 0x55 - invalid */
5625/* Opcode 0xf2 0x0f 0x55 - invalid */
5626
5627
5628/** Opcode 0x0f 0x56 - orps Vps, Wps */
5629FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5630{
5631 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5632 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5633}
5634
5635
5636/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5637FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5638{
5639 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5640 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5641}
5642
5643
5644/* Opcode 0xf3 0x0f 0x56 - invalid */
5645/* Opcode 0xf2 0x0f 0x56 - invalid */
5646
5647
5648/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5649FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5650{
5651 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5652 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5653}
5654
5655
5656/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5657FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5658{
5659 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5660 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5661}
5662
5663
5664/* Opcode 0xf3 0x0f 0x57 - invalid */
5665/* Opcode 0xf2 0x0f 0x57 - invalid */
5666
5667/** Opcode 0x0f 0x58 - addps Vps, Wps */
5668FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5669{
5670 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5671 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5672}
5673
5674
5675/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5676FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5677{
5678 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5679 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5680}
5681
5682
5683/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5684FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5685{
5686 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5687 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5688}
5689
5690
5691/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5692FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5693{
5694 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5695 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5696}
5697
5698
5699/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5700FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5701{
5702 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5703 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5704}
5705
5706
5707/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5708FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5709{
5710 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5711 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5712}
5713
5714
5715/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5716FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5717{
5718 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5719 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5720}
5721
5722
5723/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5724FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5725{
5726 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5727 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5728}
5729
5730
5731/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5732FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5733{
5734 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5735 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5736}
5737
5738
5739/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5740FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5741{
5742 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5743 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5744}
5745
5746
5747/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5748FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5749{
5750 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5751 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5752}
5753
5754
5755/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5756FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5757{
5758 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5760}
5761
5762
5763/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5764FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5765{
5766 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5767 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5768}
5769
5770
5771/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5772FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5773{
5774 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5775 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5776}
5777
5778
5779/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5780FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5781{
5782 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5783 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5784}
5785
5786
5787/* Opcode 0xf2 0x0f 0x5b - invalid */
5788
5789
5790/** Opcode 0x0f 0x5c - subps Vps, Wps */
5791FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5792{
5793 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5794 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5795}
5796
5797
5798/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5799FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5800{
5801 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5802 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5803}
5804
5805
5806/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5807FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5808{
5809 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5810 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5811}
5812
5813
5814/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5815FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5816{
5817 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5818 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5819}
5820
5821
5822/** Opcode 0x0f 0x5d - minps Vps, Wps */
5823FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5824{
5825 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5826 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5827}
5828
5829
5830/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5831FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5832{
5833 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5834 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5835}
5836
5837
5838/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5839FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5840{
5841 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5842 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5843}
5844
5845
5846/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5847FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5848{
5849 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5850 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5851}
5852
5853
5854/** Opcode 0x0f 0x5e - divps Vps, Wps */
5855FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5856{
5857 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5858 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5859}
5860
5861
5862/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5863FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5864{
5865 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5866 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5867}
5868
5869
5870/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5871FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5872{
5873 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5874 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5875}
5876
5877
5878/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5879FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5880{
5881 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5882 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5883}
5884
5885
5886/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5887FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5888{
5889 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5890 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5891}
5892
5893
5894/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5895FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5896{
5897 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5898 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5899}
5900
5901
5902/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5903FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5904{
5905 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5906 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5907}
5908
5909
5910/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5911FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5912{
5913 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5914 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5915}
5916
5917
5918/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5919FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5920{
5921 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5922 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5923}
5924
5925
5926/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5927FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5928{
5929 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5930 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5931}
5932
5933
5934/* Opcode 0xf3 0x0f 0x60 - invalid */
5935
5936
5937/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5938FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5939{
5940 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX CPUID. */
5941 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5942 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5943}
5944
5945
5946/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5947FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5948{
5949 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5950 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5951}
5952
5953
5954/* Opcode 0xf3 0x0f 0x61 - invalid */
5955
5956
5957/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5958FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5959{
5960 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5961 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5962}
5963
5964
5965/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5966FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5967{
5968 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5969 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5970}
5971
5972
5973/* Opcode 0xf3 0x0f 0x62 - invalid */
5974
5975
5976
5977/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5978FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5979{
5980 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5981 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5982}
5983
5984
5985/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5986FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5987{
5988 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5989 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5990}
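/* For reference: packsswb/packssdw narrow each signed element to the next
   smaller width with signed saturation, while packuswb (0x67) saturates
   signed words to unsigned bytes; the destination supplies the low half of
   the packed result and the source the high half. */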
5991
5992
5993/* Opcode 0xf3 0x0f 0x63 - invalid */
5994
5995
5996/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5997FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5998{
5999 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6000 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6001}
6002
6003
6004/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6005FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6006{
6007 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6008 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6009}
6010
6011
6012/* Opcode 0xf3 0x0f 0x64 - invalid */
6013
6014
6015/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6016FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6017{
6018 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6019 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6020}
6021
6022
6023/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6024FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6025{
6026 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6027 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6028}
6029
6030
6031/* Opcode 0xf3 0x0f 0x65 - invalid */
6032
6033
6034/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6035FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6036{
6037 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6038 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6039}
6040
6041
6042/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6043FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6044{
6045 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6046 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6047}
6048
6049
6050/* Opcode 0xf3 0x0f 0x66 - invalid */
6051
6052
6053/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6054FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6055{
6056 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6057 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6058}
6059
6060
6061/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6062FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6063{
6064 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6065 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6066}
6067
6068
6069/* Opcode 0xf3 0x0f 0x67 - invalid */
6070
6071
6072/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6073 * @note Intel and AMD both use Qd for the second parameter, however they
6074 * both list it as an mmX/mem64 operand and Intel describes it as being
6075 * loaded as a qword, so it should be Qq, shouldn't it? */
6076FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6077{
6078 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6079 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6080}
6081
6082
6083/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6084FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6085{
6086 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6087 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6088}
6089
6090
6091/* Opcode 0xf3 0x0f 0x68 - invalid */
6092
6093
6094/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6095 * @note Intel and AMD both use Qd for the second parameter, however they
6096 * both list it as an mmX/mem64 operand and Intel describes it as being
6097 * loaded as a qword, so it should be Qq, shouldn't it? */
6098FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6099{
6100 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6101 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6102}
6103
6104
6105/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6106FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6107{
6108 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6109 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6110
6111}
6112
6113
6114/* Opcode 0xf3 0x0f 0x69 - invalid */
6115
6116
6117/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6118 * @note Intel and AMD both use Qd for the second parameter, however they
6119 * both list it as an mmX/mem64 operand and Intel describes it as being
6120 * loaded as a qword, so it should be Qq, shouldn't it? */
6121FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6122{
6123 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6124 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6125}
6126
6127
6128/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6129FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6130{
6131 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6132 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6133}
6134
6135
6136/* Opcode 0xf3 0x0f 0x6a - invalid */
6137
6138
6139/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6140FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6141{
6142 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6143 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6144}
6145
6146
6147/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6148FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6149{
6150 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6151 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6152}
6153
6154
6155/* Opcode 0xf3 0x0f 0x6b - invalid */
6156
6157
6158/* Opcode 0x0f 0x6c - invalid */
6159
6160
6161/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6162FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6163{
6164 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6166}
6167
6168
6169/* Opcode 0xf3 0x0f 0x6c - invalid */
6170/* Opcode 0xf2 0x0f 0x6c - invalid */
6171
6172
6173/* Opcode 0x0f 0x6d - invalid */
6174
6175
6176/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6177FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6178{
6179 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6180 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6181}
6182
6183
6184/* Opcode 0xf3 0x0f 0x6d - invalid */
6185
6186
6187FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6188{
6189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6190 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6191 {
6192 /**
6193 * @opcode 0x6e
6194 * @opcodesub rex.w=1
6195 * @oppfx none
6196 * @opcpuid mmx
6197 * @opgroup og_mmx_datamove
6198 * @opxcpttype 5
6199 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6200 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6201 */
6202 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6203 if (IEM_IS_MODRM_REG_MODE(bRm))
6204 {
6205 /* MMX, greg64 */
6206 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6208 IEM_MC_LOCAL(uint64_t, u64Tmp);
6209
6210 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6211 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6212 IEM_MC_FPU_TO_MMX_MODE();
6213
6214 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6215 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6216
6217 IEM_MC_ADVANCE_RIP_AND_FINISH();
6218 IEM_MC_END();
6219 }
6220 else
6221 {
6222 /* MMX, [mem64] */
6223 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6225 IEM_MC_LOCAL(uint64_t, u64Tmp);
6226
6227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6229 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6230 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6231 IEM_MC_FPU_TO_MMX_MODE();
6232
6233 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6234 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6235
6236 IEM_MC_ADVANCE_RIP_AND_FINISH();
6237 IEM_MC_END();
6238 }
6239 }
6240 else
6241 {
6242 /**
6243 * @opdone
6244 * @opcode 0x6e
6245 * @opcodesub rex.w=0
6246 * @oppfx none
6247 * @opcpuid mmx
6248 * @opgroup og_mmx_datamove
6249 * @opxcpttype 5
6250 * @opfunction iemOp_movd_q_Pd_Ey
6251 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6252 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6253 */
6254 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6255 if (IEM_IS_MODRM_REG_MODE(bRm))
6256 {
6257 /* MMX, greg32 */
6258 IEM_MC_BEGIN(0, 1, 0);
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6260 IEM_MC_LOCAL(uint32_t, u32Tmp);
6261
6262 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6263 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6264 IEM_MC_FPU_TO_MMX_MODE();
6265
6266 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6267 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6268
6269 IEM_MC_ADVANCE_RIP_AND_FINISH();
6270 IEM_MC_END();
6271 }
6272 else
6273 {
6274 /* MMX, [mem32] */
6275 IEM_MC_BEGIN(0, 2, 0);
6276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6277 IEM_MC_LOCAL(uint32_t, u32Tmp);
6278
6279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6281 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6282 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6283 IEM_MC_FPU_TO_MMX_MODE();
6284
6285 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6286 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6287
6288 IEM_MC_ADVANCE_RIP_AND_FINISH();
6289 IEM_MC_END();
6290 }
6291 }
6292}
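/* For reference: without REX.W opcode 0x0f 0x6e is movd (32-bit source,
   zero-extended into the mmx register), with REX.W it is movq (64-bit
   source); both forms switch the FPU into MMX mode, tagging all FPU
   registers as valid (ftw=0xff in the @optest lines above). */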
6293
6294FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6295{
6296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6298 {
6299 /**
6300 * @opcode 0x6e
6301 * @opcodesub rex.w=1
6302 * @oppfx 0x66
6303 * @opcpuid sse2
6304 * @opgroup og_sse2_simdint_datamove
6305 * @opxcpttype 5
6306 * @optest 64-bit / op1=1 op2=2 -> op1=2
6307 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6308 */
6309 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6310 if (IEM_IS_MODRM_REG_MODE(bRm))
6311 {
6312 /* XMM, greg64 */
6313 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6315 IEM_MC_LOCAL(uint64_t, u64Tmp);
6316
6317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6319
6320 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6321 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6322
6323 IEM_MC_ADVANCE_RIP_AND_FINISH();
6324 IEM_MC_END();
6325 }
6326 else
6327 {
6328 /* XMM, [mem64] */
6329 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6331 IEM_MC_LOCAL(uint64_t, u64Tmp);
6332
6333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6337
6338 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6339 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6340
6341 IEM_MC_ADVANCE_RIP_AND_FINISH();
6342 IEM_MC_END();
6343 }
6344 }
6345 else
6346 {
6347 /**
6348 * @opdone
6349 * @opcode 0x6e
6350 * @opcodesub rex.w=0
6351 * @oppfx 0x66
6352 * @opcpuid sse2
6353 * @opgroup og_sse2_simdint_datamove
6354 * @opxcpttype 5
6355 * @opfunction iemOp_movd_q_Vy_Ey
6356 * @optest op1=1 op2=2 -> op1=2
6357 * @optest op1=0 op2=-42 -> op1=-42
6358 */
6359 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6360 if (IEM_IS_MODRM_REG_MODE(bRm))
6361 {
6362 /* XMM, greg32 */
6363 IEM_MC_BEGIN(0, 1, 0);
6364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6365 IEM_MC_LOCAL(uint32_t, u32Tmp);
6366
6367 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6369
6370 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6371 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6372
6373 IEM_MC_ADVANCE_RIP_AND_FINISH();
6374 IEM_MC_END();
6375 }
6376 else
6377 {
6378 /* XMM, [mem32] */
6379 IEM_MC_BEGIN(0, 2, 0);
6380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6381 IEM_MC_LOCAL(uint32_t, u32Tmp);
6382
6383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6385 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6387
6388 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6389 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6390
6391 IEM_MC_ADVANCE_RIP_AND_FINISH();
6392 IEM_MC_END();
6393 }
6394 }
6395}
6396
6397/* Opcode 0xf3 0x0f 0x6e - invalid */
6398
6399
6400/**
6401 * @opcode 0x6f
6402 * @oppfx none
6403 * @opcpuid mmx
6404 * @opgroup og_mmx_datamove
6405 * @opxcpttype 5
6406 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6407 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6408 */
6409FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6410{
6411 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6413 if (IEM_IS_MODRM_REG_MODE(bRm))
6414 {
6415 /*
6416 * Register, register.
6417 */
6418 IEM_MC_BEGIN(0, 1, 0);
6419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6420 IEM_MC_LOCAL(uint64_t, u64Tmp);
6421
6422 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6423 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6424 IEM_MC_FPU_TO_MMX_MODE();
6425
6426 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6427 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6428
6429 IEM_MC_ADVANCE_RIP_AND_FINISH();
6430 IEM_MC_END();
6431 }
6432 else
6433 {
6434 /*
6435 * Register, memory.
6436 */
6437 IEM_MC_BEGIN(0, 2, 0);
6438 IEM_MC_LOCAL(uint64_t, u64Tmp);
6439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6440
6441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6443 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6444 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6445 IEM_MC_FPU_TO_MMX_MODE();
6446
6447 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6448 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6449
6450 IEM_MC_ADVANCE_RIP_AND_FINISH();
6451 IEM_MC_END();
6452 }
6453}
6454
6455/**
6456 * @opcode 0x6f
6457 * @oppfx 0x66
6458 * @opcpuid sse2
6459 * @opgroup og_sse2_simdint_datamove
6460 * @opxcpttype 1
6461 * @optest op1=1 op2=2 -> op1=2
6462 * @optest op1=0 op2=-42 -> op1=-42
6463 */
6464FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6465{
6466 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6468 if (IEM_IS_MODRM_REG_MODE(bRm))
6469 {
6470 /*
6471 * Register, register.
6472 */
6473 IEM_MC_BEGIN(0, 0, 0);
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6475
6476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6478
6479 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6480 IEM_GET_MODRM_RM(pVCpu, bRm));
6481 IEM_MC_ADVANCE_RIP_AND_FINISH();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 /*
6487 * Register, memory.
6488 */
6489 IEM_MC_BEGIN(0, 2, 0);
6490 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6492
6493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6497
6498 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6499 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6500
6501 IEM_MC_ADVANCE_RIP_AND_FINISH();
6502 IEM_MC_END();
6503 }
6504}
6505
6506/**
6507 * @opcode 0x6f
6508 * @oppfx 0xf3
6509 * @opcpuid sse2
6510 * @opgroup og_sse2_simdint_datamove
6511 * @opxcpttype 4UA
6512 * @optest op1=1 op2=2 -> op1=2
6513 * @optest op1=0 op2=-42 -> op1=-42
6514 */
6515FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6516{
6517 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6519 if (IEM_IS_MODRM_REG_MODE(bRm))
6520 {
6521 /*
6522 * Register, register.
6523 */
6524 IEM_MC_BEGIN(0, 0, 0);
6525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6526 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6528 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6529 IEM_GET_MODRM_RM(pVCpu, bRm));
6530 IEM_MC_ADVANCE_RIP_AND_FINISH();
6531 IEM_MC_END();
6532 }
6533 else
6534 {
6535 /*
6536 * Register, memory.
6537 */
6538 IEM_MC_BEGIN(0, 2, 0);
6539 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6541
6542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6546 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6547 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6548
6549 IEM_MC_ADVANCE_RIP_AND_FINISH();
6550 IEM_MC_END();
6551 }
6552}
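/* For reference: the only difference between the two 0x6f fetch paths above
   is the alignment model - movdqa is exception type 1 and #GPs on a
   misaligned 16-byte access (IEM_MC_FETCH_MEM_U128_ALIGN_SSE), while movdqu
   is type 4UA and performs no alignment check. */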
6553
6554
6555/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6556FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6557{
6558 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6560 if (IEM_IS_MODRM_REG_MODE(bRm))
6561 {
6562 /*
6563 * Register, register.
6564 */
6565 IEM_MC_BEGIN(3, 0, 0);
6566 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6568 IEM_MC_ARG(uint64_t *, pDst, 0);
6569 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6570 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6571 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6572 IEM_MC_PREPARE_FPU_USAGE();
6573 IEM_MC_FPU_TO_MMX_MODE();
6574
6575 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6576 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6578 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6579
6580 IEM_MC_ADVANCE_RIP_AND_FINISH();
6581 IEM_MC_END();
6582 }
6583 else
6584 {
6585 /*
6586 * Register, memory.
6587 */
6588 IEM_MC_BEGIN(3, 2, 0);
6589 IEM_MC_ARG(uint64_t *, pDst, 0);
6590 IEM_MC_LOCAL(uint64_t, uSrc);
6591 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6593
6594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6595 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6596 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6598 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6599 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6600
6601 IEM_MC_PREPARE_FPU_USAGE();
6602 IEM_MC_FPU_TO_MMX_MODE();
6603
6604 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6605 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6606 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6607
6608 IEM_MC_ADVANCE_RIP_AND_FINISH();
6609 IEM_MC_END();
6610 }
6611}
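/* For reference: pshufw copies source word ((imm8 >> (2 * i)) & 3) into
   destination word i, so e.g. imm8 = 0x1b (00 01 10 11b) reverses the four
   words. The pshufd family below uses the same two-bits-per-element
   immediate scheme. */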
6612
6613
6614/**
6615 * Common worker for SSE2 instructions on the forms:
6616 * pshufd xmm1, xmm2/mem128, imm8
6617 * pshufhw xmm1, xmm2/mem128, imm8
6618 * pshuflw xmm1, xmm2/mem128, imm8
6619 *
6620 * Proper alignment of the 128-bit operand is enforced.
6621 * Exceptions type 4. SSE2 cpuid checks.
6622 */
6623FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6624{
6625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6626 if (IEM_IS_MODRM_REG_MODE(bRm))
6627 {
6628 /*
6629 * Register, register.
6630 */
6631 IEM_MC_BEGIN(3, 0, 0);
6632 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6634 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6635 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6636 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6637 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6638 IEM_MC_PREPARE_SSE_USAGE();
6639 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6640 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6641 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6642 IEM_MC_ADVANCE_RIP_AND_FINISH();
6643 IEM_MC_END();
6644 }
6645 else
6646 {
6647 /*
6648 * Register, memory.
6649 */
6650 IEM_MC_BEGIN(3, 2, 0);
6651 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6652 IEM_MC_LOCAL(RTUINT128U, uSrc);
6653 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6655
6656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6657 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6658 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6660 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6661
6662 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6663 IEM_MC_PREPARE_SSE_USAGE();
6664 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6665 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6666
6667 IEM_MC_ADVANCE_RIP_AND_FINISH();
6668 IEM_MC_END();
6669 }
6670}
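/* For reference: pshufd shuffles all four dwords, whereas pshuflw only
   shuffles the low four words (copying the high qword unchanged) and pshufhw
   only the high four words (copying the low qword unchanged). */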
6671
6672
6673/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6674FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6675{
6676 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6677 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6678}
6679
6680
6681/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6682FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6683{
6684 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6685 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6686}
6687
6688
6689/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6690FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6691{
6692 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6693 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6694}
6695
6696
6697/**
6698 * Common worker for MMX instructions of the form:
6699 * psrlw mm, imm8
6700 * psraw mm, imm8
6701 * psllw mm, imm8
6702 * psrld mm, imm8
6703 * psrad mm, imm8
6704 * pslld mm, imm8
6705 * psrlq mm, imm8
6706 * psllq mm, imm8
6707 *
6708 */
6709FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6710{
6711 if (IEM_IS_MODRM_REG_MODE(bRm))
6712 {
6713 /*
6714 * Register, immediate.
6715 */
6716 IEM_MC_BEGIN(2, 0, 0);
6717 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6719 IEM_MC_ARG(uint64_t *, pDst, 0);
6720 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6721 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6722 IEM_MC_PREPARE_FPU_USAGE();
6723 IEM_MC_FPU_TO_MMX_MODE();
6724
6725 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6726 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6727 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6728
6729 IEM_MC_ADVANCE_RIP_AND_FINISH();
6730 IEM_MC_END();
6731 }
6732 else
6733 {
6734 /*
6735 * Register, memory not supported.
6736 */
6737 /// @todo Caller already enforced register mode?!
6738 AssertFailedReturn(VINF_SUCCESS);
6739 }
6740}
6741
6742
6743/**
6744 * Common worker for SSE2 instructions of the form:
6745 * psrlw xmm, imm8
6746 * psraw xmm, imm8
6747 * psllw xmm, imm8
6748 * psrld xmm, imm8
6749 * psrad xmm, imm8
6750 * pslld xmm, imm8
6751 * psrlq xmm, imm8
6752 * psllq xmm, imm8
6753 *
6754 */
6755FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6756{
6757 if (IEM_IS_MODRM_REG_MODE(bRm))
6758 {
6759 /*
6760 * Register, immediate.
6761 */
6762 IEM_MC_BEGIN(2, 0, 0);
6763 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6765 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6766 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6767 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6768 IEM_MC_PREPARE_SSE_USAGE();
6769 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6770 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6771 IEM_MC_ADVANCE_RIP_AND_FINISH();
6772 IEM_MC_END();
6773 }
6774 else
6775 {
6776 /*
6777 * Register, memory not supported.
6778 */
6779 /// @todo Caller already enforced register mode?!
6780 AssertFailedReturn(VINF_SUCCESS);
6781 }
6782}
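/* For reference on the two shift-by-immediate workers above: for the logical
   shifts (psrlX/psllX) an immediate count larger than the element width
   zeroes the destination, whereas the arithmetic psraw/psrad clamp the count
   and fill with copies of the sign bit. */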
6783
6784
6785/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6786FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6787{
6788// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6789 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6790}
6791
6792
6793/** Opcode 0x66 0x0f 0x71 11/2. */
6794FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6795{
6796// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6797 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6798}
6799
6800
6801/** Opcode 0x0f 0x71 11/4. */
6802FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6803{
6804// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6805 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6806}
6807
6808
6809/** Opcode 0x66 0x0f 0x71 11/4. */
6810FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6811{
6812// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6813 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6814}
6815
6816
6817/** Opcode 0x0f 0x71 11/6. */
6818FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6819{
6820// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6821 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6822}
6823
6824
6825/** Opcode 0x66 0x0f 0x71 11/6. */
6826FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6827{
6828// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6829 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6830}
6831
6832
6833/**
6834 * Group 12 jump table for register variant.
6835 */
6836IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6837{
6838 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6839 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6844 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6845 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6846};
6847AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
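/* Like the other group tables, this one is indexed by /r * 4 + idxPrefix,
   i.e. four columns per /r row for the none/0x66/0xf3/0xf2 encodings; only
   the MMX (no prefix) and SSE (0x66) forms are defined for groups 12-14. */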
6848
6849
6850/** Opcode 0x0f 0x71. */
6851FNIEMOP_DEF(iemOp_Grp12)
6852{
6853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6854 if (IEM_IS_MODRM_REG_MODE(bRm))
6855 /* register, register */
6856 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6857 + pVCpu->iem.s.idxPrefix], bRm);
6858 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6859}
6860
6861
6862/** Opcode 0x0f 0x72 11/2. */
6863FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6864{
6865// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6866 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6867}
6868
6869
6870/** Opcode 0x66 0x0f 0x72 11/2. */
6871FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6872{
6873// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6874 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6875}
6876
6877
6878/** Opcode 0x0f 0x72 11/4. */
6879FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6880{
6881// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6882 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6883}
6884
6885
6886/** Opcode 0x66 0x0f 0x72 11/4. */
6887FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6888{
6889// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6890 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6891}
6892
6893
6894/** Opcode 0x0f 0x72 11/6. */
6895FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6896{
6897// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6898 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6899}
6900
6901/** Opcode 0x66 0x0f 0x72 11/6. */
6902FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6903{
6904// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6905 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6906}
6907
6908
6909/**
6910 * Group 13 jump table for register variant.
6911 */
6912IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6913{
6914 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6915 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6916 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6917 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6918 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6919 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6920 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6921 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6922};
6923AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6924
6925/** Opcode 0x0f 0x72. */
6926FNIEMOP_DEF(iemOp_Grp13)
6927{
6928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6929 if (IEM_IS_MODRM_REG_MODE(bRm))
6930 /* register, register */
6931 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6932 + pVCpu->iem.s.idxPrefix], bRm);
6933 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6934}
6935
6936
6937/** Opcode 0x0f 0x73 11/2. */
6938FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6939{
6940// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6941 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6942}
6943
6944
6945/** Opcode 0x66 0x0f 0x73 11/2. */
6946FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6947{
6948// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6949 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6950}
6951
6952
6953/** Opcode 0x66 0x0f 0x73 11/3. */
6954FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6955{
6956// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6957 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6958}
6959
6960
6961/** Opcode 0x0f 0x73 11/6. */
6962FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6963{
6964// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6965 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6966}
6967
6968
6969/** Opcode 0x66 0x0f 0x73 11/6. */
6970FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6971{
6972// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6973 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6974}
6975
6976
6977/** Opcode 0x66 0x0f 0x73 11/7. */
6978FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6979{
6980// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6981 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6982}
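/* For reference: psrldq/pslldq (the /3 and /7 rows below, 0x66 form only)
   shift the entire xmm register by imm8 bytes, zeroing it for counts above
   15; there is no MMX counterpart, hence the invalid Nq columns. */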
6983
6984/**
6985 * Group 14 jump table for register variant.
6986 */
6987IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6988{
6989 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6990 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6991 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6992 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6993 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6994 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6995 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6996 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6997};
6998AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6999
7000
7001/** Opcode 0x0f 0x73. */
7002FNIEMOP_DEF(iemOp_Grp14)
7003{
7004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7005 if (IEM_IS_MODRM_REG_MODE(bRm))
7006 /* register, register */
7007 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7008 + pVCpu->iem.s.idxPrefix], bRm);
7009 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7010}
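/*
 * Note: Unlike the other shift rows, /3 (psrldq) and /7 (pslldq) only exist
 *       in the 0x66 (SSE2) column of the table above; the byte-wise
 *       whole-register shifts have no MMX counterpart, so their no-prefix
 *       slots stay invalid.
 */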
7011
7012
7013/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7014FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7015{
7016 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7017 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7018}
7019
7020
7021/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7022FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7023{
7024 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7025 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7026}
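/*
 * The pcmpeq* workers produce all-ones / all-zero element masks.  A plain C
 * sketch of the 128-bit byte variant, assuming a RTUINT128U-style overlay
 * (illustrative only, not the actual iemAImpl_pcmpeqb_u128 source):
 *
 *           for (unsigned i = 0; i < 16; i++)
 *               puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 *
 * The word and dword forms below do the same over au16/au32 lanes.
 */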
7027
7028
7029/* Opcode 0xf3 0x0f 0x74 - invalid */
7030/* Opcode 0xf2 0x0f 0x74 - invalid */
7031
7032
7033/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7034FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7035{
7036 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7037 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7038}
7039
7040
7041/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7042FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7043{
7044 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7045 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7046}
7047
7048
7049/* Opcode 0xf3 0x0f 0x75 - invalid */
7050/* Opcode 0xf2 0x0f 0x75 - invalid */
7051
7052
7053/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7054FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7055{
7056 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7057 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7058}
7059
7060
7061/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7062FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7063{
7064 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7065 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7066}
7067
7068
7069/* Opcode 0xf3 0x0f 0x76 - invalid */
7070/* Opcode 0xf2 0x0f 0x76 - invalid */
7071
7072
7073/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7074FNIEMOP_DEF(iemOp_emms)
7075{
7076 IEMOP_MNEMONIC(emms, "emms");
7077 IEM_MC_BEGIN(0, 0, 0);
7078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7079 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7080 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7081 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7082 IEM_MC_FPU_FROM_MMX_MODE();
7083 IEM_MC_ADVANCE_RIP_AND_FINISH();
7084 IEM_MC_END();
7085}
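/*
 * Leaving MMX mode is the whole point of EMMS: any MMX instruction tags all
 * eight FPU registers as valid (abridged FTW 0xff), while EMMS tags them
 * empty again so that following x87 code starts with a free register stack.
 * In abridged FXSAVE terms that amounts to (a sketch, not the
 * IEM_MC_FPU_FROM_MMX_MODE implementation):
 *
 *           pFpuCtx->FTW = 0;       // all eight x87 registers tagged empty
 */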
7086
7087/* Opcode 0x66 0x0f 0x77 - invalid */
7088/* Opcode 0xf3 0x0f 0x77 - invalid */
7089/* Opcode 0xf2 0x0f 0x77 - invalid */
7090
7091/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7092#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7093FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7094{
7095 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7096 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7097 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7098 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7099
7100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7101 if (IEM_IS_MODRM_REG_MODE(bRm))
7102 {
7103 /*
7104 * Register, register.
7105 */
7106 if (enmEffOpSize == IEMMODE_64BIT)
7107 {
7108 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
7109 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7110 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7111 IEM_MC_ARG(uint64_t, u64Enc, 1);
7112 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7113 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7114 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7115 IEM_MC_END();
7116 }
7117 else
7118 {
7119 IEM_MC_BEGIN(2, 0, 0);
7120 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7121 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7122 IEM_MC_ARG(uint32_t, u32Enc, 1);
7123 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7124 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7125 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7126 IEM_MC_END();
7127 }
7128 }
7129 else
7130 {
7131 /*
7132 * Memory, register.
7133 */
7134 if (enmEffOpSize == IEMMODE_64BIT)
7135 {
7136 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
7137 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7138 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7139 IEM_MC_ARG(uint64_t, u64Enc, 2);
7140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7141 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7142 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7143 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7144 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7145 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7146 IEM_MC_END();
7147 }
7148 else
7149 {
7150 IEM_MC_BEGIN(3, 0, 0);
7151 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7152 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7153 IEM_MC_ARG(uint32_t, u32Enc, 2);
7154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7155 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7156 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7157 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7158 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7159 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7160 IEM_MC_END();
7161 }
7162 }
7163}
7164#else
7165FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7166#endif
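/*
 * Note: VMREAD (and VMWRITE below) take no operand-size override: the operand
 *       is 64 bits wide in 64-bit mode and 32 bits everywhere else, which is
 *       why both decoders compute enmEffOpSize from IEM_IS_64BIT_CODE() alone
 *       and reject the 0x66/0xf3/0xf2 prefixes with #UD up front.
 */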
7167
7168/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7169FNIEMOP_STUB(iemOp_AmdGrp17);
7170/* Opcode 0xf3 0x0f 0x78 - invalid */
7171/* Opcode 0xf2 0x0f 0x78 - invalid */
7172
7173/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7174#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7175FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7176{
7177 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7178 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7179 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7180 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7181
7182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7183 if (IEM_IS_MODRM_REG_MODE(bRm))
7184 {
7185 /*
7186 * Register, register.
7187 */
7188 if (enmEffOpSize == IEMMODE_64BIT)
7189 {
7190 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
7191 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7192 IEM_MC_ARG(uint64_t, u64Val, 0);
7193 IEM_MC_ARG(uint64_t, u64Enc, 1);
7194 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7195 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7196 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7197 IEM_MC_END();
7198 }
7199 else
7200 {
7201 IEM_MC_BEGIN(2, 0, 0);
7202 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7203 IEM_MC_ARG(uint32_t, u32Val, 0);
7204 IEM_MC_ARG(uint32_t, u32Enc, 1);
7205 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7206 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7207 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7208 IEM_MC_END();
7209 }
7210 }
7211 else
7212 {
7213 /*
7214 * Register, memory.
7215 */
7216 if (enmEffOpSize == IEMMODE_64BIT)
7217 {
7218 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
7219 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7220 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7221 IEM_MC_ARG(uint64_t, u64Enc, 2);
7222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7223 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7224 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7225 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7226 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7227 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7228 IEM_MC_END();
7229 }
7230 else
7231 {
7232 IEM_MC_BEGIN(3, 0, 0);
7233 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7234 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7235 IEM_MC_ARG(uint32_t, u32Enc, 2);
7236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7237 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7238 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7239 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7240 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7241 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7242 IEM_MC_END();
7243 }
7244 }
7245}
7246#else
7247FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7248#endif
7249/* Opcode 0x66 0x0f 0x79 - invalid */
7250/* Opcode 0xf3 0x0f 0x79 - invalid */
7251/* Opcode 0xf2 0x0f 0x79 - invalid */
7252
7253/* Opcode 0x0f 0x7a - invalid */
7254/* Opcode 0x66 0x0f 0x7a - invalid */
7255/* Opcode 0xf3 0x0f 0x7a - invalid */
7256/* Opcode 0xf2 0x0f 0x7a - invalid */
7257
7258/* Opcode 0x0f 0x7b - invalid */
7259/* Opcode 0x66 0x0f 0x7b - invalid */
7260/* Opcode 0xf3 0x0f 0x7b - invalid */
7261/* Opcode 0xf2 0x0f 0x7b - invalid */
7262
7263/* Opcode 0x0f 0x7c - invalid */
7264
7265
7266/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7267FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7268{
7269 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7270 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7271}
7272
7273
7274/* Opcode 0xf3 0x0f 0x7c - invalid */
7275
7276
7277/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7278FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7279{
7280 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7281 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7282}
7283
7284
7285/* Opcode 0x0f 0x7d - invalid */
7286
7287
7288/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7289FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7290{
7291 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7292 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7293}
7294
7295
7296/* Opcode 0xf3 0x0f 0x7d - invalid */
7297
7298
7299/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7300FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7301{
7302 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7303 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7304}
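/*
 * The four SSE3 horizontal ops above pair up adjacent lanes.  For HADDPD the
 * per-lane arithmetic is (a C sketch of the semantics, not iemAImpl code):
 *
 *           double const r0 = dst0 + dst1;      // low result from destination
 *           double const r1 = src0 + src1;      // high result from source
 *           // dst = { r0, r1 };  HSUBPD computes dst0 - dst1 and src0 - src1.
 *
 * HADDPS/HSUBPS do the same over four single-precision lane pairs.
 */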
7305
7306
7307/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7308FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7309{
7310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7311 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7312 {
7313 /**
7314 * @opcode 0x7e
7315 * @opcodesub rex.w=1
7316 * @oppfx none
7317 * @opcpuid mmx
7318 * @opgroup og_mmx_datamove
7319 * @opxcpttype 5
7320 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7321 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7322 */
7323 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7324 if (IEM_IS_MODRM_REG_MODE(bRm))
7325 {
7326 /* greg64, MMX */
7327 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
7328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7329 IEM_MC_LOCAL(uint64_t, u64Tmp);
7330
7331 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7332 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7333 IEM_MC_FPU_TO_MMX_MODE();
7334
7335 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7336 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7337
7338 IEM_MC_ADVANCE_RIP_AND_FINISH();
7339 IEM_MC_END();
7340 }
7341 else
7342 {
7343 /* [mem64], MMX */
7344 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
7345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7346 IEM_MC_LOCAL(uint64_t, u64Tmp);
7347
7348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7350 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7351 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7352 IEM_MC_FPU_TO_MMX_MODE();
7353
7354 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7355 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7356
7357 IEM_MC_ADVANCE_RIP_AND_FINISH();
7358 IEM_MC_END();
7359 }
7360 }
7361 else
7362 {
7363 /**
7364 * @opdone
7365 * @opcode 0x7e
7366 * @opcodesub rex.w=0
7367 * @oppfx none
7368 * @opcpuid mmx
7369 * @opgroup og_mmx_datamove
7370 * @opxcpttype 5
7371 * @opfunction iemOp_movd_q_Ey_Pd
7372 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7373 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7374 */
7375 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7376 if (IEM_IS_MODRM_REG_MODE(bRm))
7377 {
7378 /* greg32, MMX */
7379 IEM_MC_BEGIN(0, 1, 0);
7380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7381 IEM_MC_LOCAL(uint32_t, u32Tmp);
7382
7383 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7384 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7385 IEM_MC_FPU_TO_MMX_MODE();
7386
7387 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7388 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7389
7390 IEM_MC_ADVANCE_RIP_AND_FINISH();
7391 IEM_MC_END();
7392 }
7393 else
7394 {
7395 /* [mem32], MMX */
7396 IEM_MC_BEGIN(0, 2, 0);
7397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7398 IEM_MC_LOCAL(uint32_t, u32Tmp);
7399
7400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7402 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7403 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7404 IEM_MC_FPU_TO_MMX_MODE();
7405
7406 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7407 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7408
7409 IEM_MC_ADVANCE_RIP_AND_FINISH();
7410 IEM_MC_END();
7411 }
7412 }
7413}
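/*
 * Note: 0x0f 0x7e thus decodes to two distinct instructions: with REX.W it is
 *       a 64-bit MOVQ store, without it a 32-bit MOVD store taking only the
 *       low dword of the MMX register.  The 0x66-prefixed variant below
 *       splits the same way for XMM registers.
 */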
7414
7415
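/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */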
7416FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7417{
7418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7419 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7420 {
7421 /**
7422 * @opcode 0x7e
7423 * @opcodesub rex.w=1
7424 * @oppfx 0x66
7425 * @opcpuid sse2
7426 * @opgroup og_sse2_simdint_datamove
7427 * @opxcpttype 5
7428 * @optest 64-bit / op1=1 op2=2 -> op1=2
7429 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7430 */
7431 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7432 if (IEM_IS_MODRM_REG_MODE(bRm))
7433 {
7434 /* greg64, XMM */
7435 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7437 IEM_MC_LOCAL(uint64_t, u64Tmp);
7438
7439 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7440 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7441
7442 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword */);
7443 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7444
7445 IEM_MC_ADVANCE_RIP_AND_FINISH();
7446 IEM_MC_END();
7447 }
7448 else
7449 {
7450 /* [mem64], XMM */
7451 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
7452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7453 IEM_MC_LOCAL(uint64_t, u64Tmp);
7454
7455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7457 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7458 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7459
7460 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword */);
7461 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7462
7463 IEM_MC_ADVANCE_RIP_AND_FINISH();
7464 IEM_MC_END();
7465 }
7466 }
7467 else
7468 {
7469 /**
7470 * @opdone
7471 * @opcode 0x7e
7472 * @opcodesub rex.w=0
7473 * @oppfx 0x66
7474 * @opcpuid sse2
7475 * @opgroup og_sse2_simdint_datamove
7476 * @opxcpttype 5
7477 * @opfunction iemOp_movd_q_Ey_Vy
7478 * @optest op1=1 op2=2 -> op1=2
7479 * @optest op1=0 op2=-42 -> op1=-42
7480 */
7481 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7482 if (IEM_IS_MODRM_REG_MODE(bRm))
7483 {
7484 /* greg32, XMM */
7485 IEM_MC_BEGIN(0, 1, 0);
7486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7487 IEM_MC_LOCAL(uint32_t, u32Tmp);
7488
7489 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7490 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7491
7492 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iDword */);
7493 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7494
7495 IEM_MC_ADVANCE_RIP_AND_FINISH();
7496 IEM_MC_END();
7497 }
7498 else
7499 {
7500 /* [mem32], XMM */
7501 IEM_MC_BEGIN(0, 2, 0);
7502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7503 IEM_MC_LOCAL(uint32_t, u32Tmp);
7504
7505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7507 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7508 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7509
7510 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iDword */);
7511 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7512
7513 IEM_MC_ADVANCE_RIP_AND_FINISH();
7514 IEM_MC_END();
7515 }
7516 }
7517}
7518
7519/**
7520 * @opcode 0x7e
7521 * @oppfx 0xf3
7522 * @opcpuid sse2
7523 * @opgroup og_sse2_pcksclr_datamove
7524 * @opxcpttype none
7525 * @optest op1=1 op2=2 -> op1=2
7526 * @optest op1=0 op2=-42 -> op1=-42
7527 */
7528FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7529{
7530 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7532 if (IEM_IS_MODRM_REG_MODE(bRm))
7533 {
7534 /*
7535 * XMM128, XMM64.
7536 */
7537 IEM_MC_BEGIN(0, 2, 0);
7538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7539 IEM_MC_LOCAL(uint64_t, uSrc);
7540
7541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7543
7544 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
7545 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7546
7547 IEM_MC_ADVANCE_RIP_AND_FINISH();
7548 IEM_MC_END();
7549 }
7550 else
7551 {
7552 /*
7553 * XMM128, [mem64].
7554 */
7555 IEM_MC_BEGIN(0, 2, 0);
7556 IEM_MC_LOCAL(uint64_t, uSrc);
7557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7558
7559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7561 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7562 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7563
7564 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7565 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7566
7567 IEM_MC_ADVANCE_RIP_AND_FINISH();
7568 IEM_MC_END();
7569 }
7570}
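/*
 * The VqZx_WO operand spells out the important detail here: both the register
 * and the memory form zero the high quadword of the destination.  In plain C
 * terms IEM_MC_STORE_XREG_U64_ZX_U128 amounts to (sketch):
 *
 *           pDst->au64[0] = uSrc;
 *           pDst->au64[1] = 0;      // zero-extend to the full 128 bits
 */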
7571
7572/* Opcode 0xf2 0x0f 0x7e - invalid */
7573
7574
7575/** Opcode 0x0f 0x7f - movq Qq, Pq */
7576FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7577{
7578 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7580 if (IEM_IS_MODRM_REG_MODE(bRm))
7581 {
7582 /*
7583 * MMX, MMX.
7584 */
7585 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7586 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7587 IEM_MC_BEGIN(0, 1, 0);
7588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7589 IEM_MC_LOCAL(uint64_t, u64Tmp);
7590 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7591 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7592 IEM_MC_FPU_TO_MMX_MODE();
7593
7594 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7595 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7596
7597 IEM_MC_ADVANCE_RIP_AND_FINISH();
7598 IEM_MC_END();
7599 }
7600 else
7601 {
7602 /*
7603 * [mem64], MMX.
7604 */
7605 IEM_MC_BEGIN(0, 2, 0);
7606 IEM_MC_LOCAL(uint64_t, u64Tmp);
7607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7608
7609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7611 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7612 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7613 IEM_MC_FPU_TO_MMX_MODE();
7614
7615 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7616 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7617
7618 IEM_MC_ADVANCE_RIP_AND_FINISH();
7619 IEM_MC_END();
7620 }
7621}
7622
7623/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7624FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7625{
7626 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7628 if (IEM_IS_MODRM_REG_MODE(bRm))
7629 {
7630 /*
7631 * XMM, XMM.
7632 */
7633 IEM_MC_BEGIN(0, 0, 0);
7634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7637 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7638 IEM_GET_MODRM_REG(pVCpu, bRm));
7639 IEM_MC_ADVANCE_RIP_AND_FINISH();
7640 IEM_MC_END();
7641 }
7642 else
7643 {
7644 /*
7645 * [mem128], XMM.
7646 */
7647 IEM_MC_BEGIN(0, 2, 0);
7648 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7650
7651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7653 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7654 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7655
7656 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7657 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7658
7659 IEM_MC_ADVANCE_RIP_AND_FINISH();
7660 IEM_MC_END();
7661 }
7662}
7663
7664/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7665FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7666{
7667 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7669 if (IEM_IS_MODRM_REG_MODE(bRm))
7670 {
7671 /*
7672 * XMM, XMM.
7673 */
7674 IEM_MC_BEGIN(0, 0, 0);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7676 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7678 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7679 IEM_GET_MODRM_REG(pVCpu, bRm));
7680 IEM_MC_ADVANCE_RIP_AND_FINISH();
7681 IEM_MC_END();
7682 }
7683 else
7684 {
7685 /*
7686 * [mem128], XMM.
7687 */
7688 IEM_MC_BEGIN(0, 2, 0);
7689 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7691
7692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7694 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7695 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7696
7697 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7698 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7699
7700 IEM_MC_ADVANCE_RIP_AND_FINISH();
7701 IEM_MC_END();
7702 }
7703}
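/*
 * The only difference between the MOVDQA and MOVDQU stores above is the
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE vs IEM_MC_STORE_MEM_U128 microcode op: the
 * aligned variant faults on a misaligned effective address while the
 * unaligned one does not.  Conceptually (a sketch of the check, assuming the
 * usual #GP(0) treatment for non-stack segments):
 *
 *           if (GCPtrEffSrc & 15)
 *               return iemRaiseGeneralProtectionFault0(pVCpu);
 */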
7704
7705/* Opcode 0xf2 0x0f 0x7f - invalid */
7706
7707
7708
7709/** Opcode 0x0f 0x80. */
7710FNIEMOP_DEF(iemOp_jo_Jv)
7711{
7712 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7713 IEMOP_HLP_MIN_386();
7714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7716 {
7717 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7718 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7721 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7722 } IEM_MC_ELSE() {
7723 IEM_MC_ADVANCE_RIP_AND_FINISH();
7724 } IEM_MC_ENDIF();
7725 IEM_MC_END();
7726 }
7727 else
7728 {
7729 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7730 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7732 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7733 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7734 } IEM_MC_ELSE() {
7735 IEM_MC_ADVANCE_RIP_AND_FINISH();
7736 } IEM_MC_ENDIF();
7737 IEM_MC_END();
7738 }
7739}
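/*
 * All of the 0x0f 0x80..0x8f long Jcc forms follow this exact template: fetch
 * a 16- or 32-bit displacement depending on the effective operand size, then
 * either take the relative jump or just advance RIP.  Only the EFLAGS
 * predicate varies; as a C sketch of the less obvious ones:
 *
 *           bool const fJb  = RT_BOOL(fEfl & X86_EFL_CF);                  // jc/jb
 *           bool const fJbe = RT_BOOL(fEfl & (X86_EFL_CF | X86_EFL_ZF));   // jbe
 *           bool const fJl  = RT_BOOL(fEfl & X86_EFL_SF)
 *                          != RT_BOOL(fEfl & X86_EFL_OF);                  // jl
 *           bool const fJle = fJl || RT_BOOL(fEfl & X86_EFL_ZF);           // jle
 *
 * The inverted forms (jnc, jnbe, jnl, jnle, ...) simply swap the two arms.
 */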
7740
7741
7742/** Opcode 0x0f 0x81. */
7743FNIEMOP_DEF(iemOp_jno_Jv)
7744{
7745 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7746 IEMOP_HLP_MIN_386();
7747 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7748 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7749 {
7750 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7751 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7754 IEM_MC_ADVANCE_RIP_AND_FINISH();
7755 } IEM_MC_ELSE() {
7756 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7757 } IEM_MC_ENDIF();
7758 IEM_MC_END();
7759 }
7760 else
7761 {
7762 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7763 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7765 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7766 IEM_MC_ADVANCE_RIP_AND_FINISH();
7767 } IEM_MC_ELSE() {
7768 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7769 } IEM_MC_ENDIF();
7770 IEM_MC_END();
7771 }
7772}
7773
7774
7775/** Opcode 0x0f 0x82. */
7776FNIEMOP_DEF(iemOp_jc_Jv)
7777{
7778 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7779 IEMOP_HLP_MIN_386();
7780 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7781 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7782 {
7783 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7784 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7786 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7787 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7788 } IEM_MC_ELSE() {
7789 IEM_MC_ADVANCE_RIP_AND_FINISH();
7790 } IEM_MC_ENDIF();
7791 IEM_MC_END();
7792 }
7793 else
7794 {
7795 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7796 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7799 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7800 } IEM_MC_ELSE() {
7801 IEM_MC_ADVANCE_RIP_AND_FINISH();
7802 } IEM_MC_ENDIF();
7803 IEM_MC_END();
7804 }
7805}
7806
7807
7808/** Opcode 0x0f 0x83. */
7809FNIEMOP_DEF(iemOp_jnc_Jv)
7810{
7811 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7812 IEMOP_HLP_MIN_386();
7813 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7814 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7815 {
7816 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7817 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7819 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7820 IEM_MC_ADVANCE_RIP_AND_FINISH();
7821 } IEM_MC_ELSE() {
7822 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7823 } IEM_MC_ENDIF();
7824 IEM_MC_END();
7825 }
7826 else
7827 {
7828 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7829 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7832 IEM_MC_ADVANCE_RIP_AND_FINISH();
7833 } IEM_MC_ELSE() {
7834 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7835 } IEM_MC_ENDIF();
7836 IEM_MC_END();
7837 }
7838}
7839
7840
7841/** Opcode 0x0f 0x84. */
7842FNIEMOP_DEF(iemOp_je_Jv)
7843{
7844 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7845 IEMOP_HLP_MIN_386();
7846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7848 {
7849 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7850 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7852 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7853 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7854 } IEM_MC_ELSE() {
7855 IEM_MC_ADVANCE_RIP_AND_FINISH();
7856 } IEM_MC_ENDIF();
7857 IEM_MC_END();
7858 }
7859 else
7860 {
7861 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7862 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7865 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7866 } IEM_MC_ELSE() {
7867 IEM_MC_ADVANCE_RIP_AND_FINISH();
7868 } IEM_MC_ENDIF();
7869 IEM_MC_END();
7870 }
7871}
7872
7873
7874/** Opcode 0x0f 0x85. */
7875FNIEMOP_DEF(iemOp_jne_Jv)
7876{
7877 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7878 IEMOP_HLP_MIN_386();
7879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7881 {
7882 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7883 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7885 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7886 IEM_MC_ADVANCE_RIP_AND_FINISH();
7887 } IEM_MC_ELSE() {
7888 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7889 } IEM_MC_ENDIF();
7890 IEM_MC_END();
7891 }
7892 else
7893 {
7894 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7898 IEM_MC_ADVANCE_RIP_AND_FINISH();
7899 } IEM_MC_ELSE() {
7900 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7901 } IEM_MC_ENDIF();
7902 IEM_MC_END();
7903 }
7904}
7905
7906
7907/** Opcode 0x0f 0x86. */
7908FNIEMOP_DEF(iemOp_jbe_Jv)
7909{
7910 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7911 IEMOP_HLP_MIN_386();
7912 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7913 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7914 {
7915 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7916 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7918 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7919 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7920 } IEM_MC_ELSE() {
7921 IEM_MC_ADVANCE_RIP_AND_FINISH();
7922 } IEM_MC_ENDIF();
7923 IEM_MC_END();
7924 }
7925 else
7926 {
7927 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7928 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7930 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7931 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7932 } IEM_MC_ELSE() {
7933 IEM_MC_ADVANCE_RIP_AND_FINISH();
7934 } IEM_MC_ENDIF();
7935 IEM_MC_END();
7936 }
7937}
7938
7939
7940/** Opcode 0x0f 0x87. */
7941FNIEMOP_DEF(iemOp_jnbe_Jv)
7942{
7943 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7944 IEMOP_HLP_MIN_386();
7945 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7946 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7947 {
7948 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7949 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7952 IEM_MC_ADVANCE_RIP_AND_FINISH();
7953 } IEM_MC_ELSE() {
7954 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7955 } IEM_MC_ENDIF();
7956 IEM_MC_END();
7957 }
7958 else
7959 {
7960 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7961 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7963 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7964 IEM_MC_ADVANCE_RIP_AND_FINISH();
7965 } IEM_MC_ELSE() {
7966 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7967 } IEM_MC_ENDIF();
7968 IEM_MC_END();
7969 }
7970}
7971
7972
7973/** Opcode 0x0f 0x88. */
7974FNIEMOP_DEF(iemOp_js_Jv)
7975{
7976 IEMOP_MNEMONIC(js_Jv, "js Jv");
7977 IEMOP_HLP_MIN_386();
7978 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7979 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7980 {
7981 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7982 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7985 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7986 } IEM_MC_ELSE() {
7987 IEM_MC_ADVANCE_RIP_AND_FINISH();
7988 } IEM_MC_ENDIF();
7989 IEM_MC_END();
7990 }
7991 else
7992 {
7993 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7994 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7997 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7998 } IEM_MC_ELSE() {
7999 IEM_MC_ADVANCE_RIP_AND_FINISH();
8000 } IEM_MC_ENDIF();
8001 IEM_MC_END();
8002 }
8003}
8004
8005
8006/** Opcode 0x0f 0x89. */
8007FNIEMOP_DEF(iemOp_jns_Jv)
8008{
8009 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8010 IEMOP_HLP_MIN_386();
8011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8013 {
8014 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8015 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8017 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8018 IEM_MC_ADVANCE_RIP_AND_FINISH();
8019 } IEM_MC_ELSE() {
8020 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8021 } IEM_MC_ENDIF();
8022 IEM_MC_END();
8023 }
8024 else
8025 {
8026 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8027 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8030 IEM_MC_ADVANCE_RIP_AND_FINISH();
8031 } IEM_MC_ELSE() {
8032 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8033 } IEM_MC_ENDIF();
8034 IEM_MC_END();
8035 }
8036}
8037
8038
8039/** Opcode 0x0f 0x8a. */
8040FNIEMOP_DEF(iemOp_jp_Jv)
8041{
8042 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8043 IEMOP_HLP_MIN_386();
8044 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8045 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8046 {
8047 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8048 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8051 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8052 } IEM_MC_ELSE() {
8053 IEM_MC_ADVANCE_RIP_AND_FINISH();
8054 } IEM_MC_ENDIF();
8055 IEM_MC_END();
8056 }
8057 else
8058 {
8059 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8060 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8062 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8063 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8064 } IEM_MC_ELSE() {
8065 IEM_MC_ADVANCE_RIP_AND_FINISH();
8066 } IEM_MC_ENDIF();
8067 IEM_MC_END();
8068 }
8069}
8070
8071
8072/** Opcode 0x0f 0x8b. */
8073FNIEMOP_DEF(iemOp_jnp_Jv)
8074{
8075 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8076 IEMOP_HLP_MIN_386();
8077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8079 {
8080 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8081 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8084 IEM_MC_ADVANCE_RIP_AND_FINISH();
8085 } IEM_MC_ELSE() {
8086 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8087 } IEM_MC_ENDIF();
8088 IEM_MC_END();
8089 }
8090 else
8091 {
8092 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8095 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8096 IEM_MC_ADVANCE_RIP_AND_FINISH();
8097 } IEM_MC_ELSE() {
8098 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8099 } IEM_MC_ENDIF();
8100 IEM_MC_END();
8101 }
8102}
8103
8104
8105/** Opcode 0x0f 0x8c. */
8106FNIEMOP_DEF(iemOp_jl_Jv)
8107{
8108 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8109 IEMOP_HLP_MIN_386();
8110 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8111 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8112 {
8113 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8114 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8116 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8117 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8118 } IEM_MC_ELSE() {
8119 IEM_MC_ADVANCE_RIP_AND_FINISH();
8120 } IEM_MC_ENDIF();
8121 IEM_MC_END();
8122 }
8123 else
8124 {
8125 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8126 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8128 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8129 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8130 } IEM_MC_ELSE() {
8131 IEM_MC_ADVANCE_RIP_AND_FINISH();
8132 } IEM_MC_ENDIF();
8133 IEM_MC_END();
8134 }
8135}
8136
8137
8138/** Opcode 0x0f 0x8d. */
8139FNIEMOP_DEF(iemOp_jnl_Jv)
8140{
8141 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8142 IEMOP_HLP_MIN_386();
8143 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8144 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8145 {
8146 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8147 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8149 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8150 IEM_MC_ADVANCE_RIP_AND_FINISH();
8151 } IEM_MC_ELSE() {
8152 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8153 } IEM_MC_ENDIF();
8154 IEM_MC_END();
8155 }
8156 else
8157 {
8158 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8159 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8161 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8162 IEM_MC_ADVANCE_RIP_AND_FINISH();
8163 } IEM_MC_ELSE() {
8164 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8165 } IEM_MC_ENDIF();
8166 IEM_MC_END();
8167 }
8168}
8169
8170
8171/** Opcode 0x0f 0x8e. */
8172FNIEMOP_DEF(iemOp_jle_Jv)
8173{
8174 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8175 IEMOP_HLP_MIN_386();
8176 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8177 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8178 {
8179 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8180 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8182 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8183 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8184 } IEM_MC_ELSE() {
8185 IEM_MC_ADVANCE_RIP_AND_FINISH();
8186 } IEM_MC_ENDIF();
8187 IEM_MC_END();
8188 }
8189 else
8190 {
8191 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8192 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8194 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8195 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8196 } IEM_MC_ELSE() {
8197 IEM_MC_ADVANCE_RIP_AND_FINISH();
8198 } IEM_MC_ENDIF();
8199 IEM_MC_END();
8200 }
8201}
8202
8203
8204/** Opcode 0x0f 0x8f. */
8205FNIEMOP_DEF(iemOp_jnle_Jv)
8206{
8207 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8208 IEMOP_HLP_MIN_386();
8209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8210 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8211 {
8212 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8213 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8215 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8216 IEM_MC_ADVANCE_RIP_AND_FINISH();
8217 } IEM_MC_ELSE() {
8218 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8219 } IEM_MC_ENDIF();
8220 IEM_MC_END();
8221 }
8222 else
8223 {
8224 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8225 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8227 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8228 IEM_MC_ADVANCE_RIP_AND_FINISH();
8229 } IEM_MC_ELSE() {
8230 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8231 } IEM_MC_ENDIF();
8232 IEM_MC_END();
8233 }
8234}
8235
8236
8237/** Opcode 0x0f 0x90. */
8238FNIEMOP_DEF(iemOp_seto_Eb)
8239{
8240 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8241 IEMOP_HLP_MIN_386();
8242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8243
8244 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8245 * any way. AMD says it's "unused", whatever that means. We're
8246 * ignoring for now. */
8247 if (IEM_IS_MODRM_REG_MODE(bRm))
8248 {
8249 /* register target */
8250 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8253 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8254 } IEM_MC_ELSE() {
8255 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8256 } IEM_MC_ENDIF();
8257 IEM_MC_ADVANCE_RIP_AND_FINISH();
8258 IEM_MC_END();
8259 }
8260 else
8261 {
8262 /* memory target */
8263 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8269 } IEM_MC_ELSE() {
8270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8271 } IEM_MC_ENDIF();
8272 IEM_MC_ADVANCE_RIP_AND_FINISH();
8273 IEM_MC_END();
8274 }
8275}
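/*
 * The remaining 0x0f 0x90..0x9f SETcc forms below repeat this template with
 * the same EFLAGS predicates as the Jcc family; the net effect is simply
 *
 *           *pbDst = fConditionMet ? 1 : 0;
 *
 * with an 8-bit register or memory destination and the ModR/M reg field
 * (currently) ignored, as the @todo notes say.
 */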
8276
8277
8278/** Opcode 0x0f 0x91. */
8279FNIEMOP_DEF(iemOp_setno_Eb)
8280{
8281 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8282 IEMOP_HLP_MIN_386();
8283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8284
8285 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8286 * any way. AMD says it's "unused", whatever that means. We're
8287 * ignoring for now. */
8288 if (IEM_IS_MODRM_REG_MODE(bRm))
8289 {
8290 /* register target */
8291 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8293 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8294 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8295 } IEM_MC_ELSE() {
8296 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8297 } IEM_MC_ENDIF();
8298 IEM_MC_ADVANCE_RIP_AND_FINISH();
8299 IEM_MC_END();
8300 }
8301 else
8302 {
8303 /* memory target */
8304 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8308 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8309 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8310 } IEM_MC_ELSE() {
8311 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8312 } IEM_MC_ENDIF();
8313 IEM_MC_ADVANCE_RIP_AND_FINISH();
8314 IEM_MC_END();
8315 }
8316}
8317
8318
8319/** Opcode 0x0f 0x92. */
8320FNIEMOP_DEF(iemOp_setc_Eb)
8321{
8322 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8323 IEMOP_HLP_MIN_386();
8324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8325
8326 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8327 * any way. AMD says it's "unused", whatever that means. We're
8328 * ignoring for now. */
8329 if (IEM_IS_MODRM_REG_MODE(bRm))
8330 {
8331 /* register target */
8332 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8334 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8335 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8336 } IEM_MC_ELSE() {
8337 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8338 } IEM_MC_ENDIF();
8339 IEM_MC_ADVANCE_RIP_AND_FINISH();
8340 IEM_MC_END();
8341 }
8342 else
8343 {
8344 /* memory target */
8345 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8349 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8350 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8351 } IEM_MC_ELSE() {
8352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8353 } IEM_MC_ENDIF();
8354 IEM_MC_ADVANCE_RIP_AND_FINISH();
8355 IEM_MC_END();
8356 }
8357}
8358
8359
8360/** Opcode 0x0f 0x93. */
8361FNIEMOP_DEF(iemOp_setnc_Eb)
8362{
8363 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8364 IEMOP_HLP_MIN_386();
8365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8366
8367 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8368 * any way. AMD says it's "unused", whatever that means. We're
8369 * ignoring for now. */
8370 if (IEM_IS_MODRM_REG_MODE(bRm))
8371 {
8372 /* register target */
8373 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8376 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8377 } IEM_MC_ELSE() {
8378 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8379 } IEM_MC_ENDIF();
8380 IEM_MC_ADVANCE_RIP_AND_FINISH();
8381 IEM_MC_END();
8382 }
8383 else
8384 {
8385 /* memory target */
8386 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8392 } IEM_MC_ELSE() {
8393 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8394 } IEM_MC_ENDIF();
8395 IEM_MC_ADVANCE_RIP_AND_FINISH();
8396 IEM_MC_END();
8397 }
8398}
8399
8400
8401/** Opcode 0x0f 0x94. */
8402FNIEMOP_DEF(iemOp_sete_Eb)
8403{
8404 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8405 IEMOP_HLP_MIN_386();
8406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8407
8408 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8409 * any way. AMD says it's "unused", whatever that means. We're
8410 * ignoring for now. */
8411 if (IEM_IS_MODRM_REG_MODE(bRm))
8412 {
8413 /* register target */
8414 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8417 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8418 } IEM_MC_ELSE() {
8419 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8420 } IEM_MC_ENDIF();
8421 IEM_MC_ADVANCE_RIP_AND_FINISH();
8422 IEM_MC_END();
8423 }
8424 else
8425 {
8426 /* memory target */
8427 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8431 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8433 } IEM_MC_ELSE() {
8434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8435 } IEM_MC_ENDIF();
8436 IEM_MC_ADVANCE_RIP_AND_FINISH();
8437 IEM_MC_END();
8438 }
8439}
8440
8441
8442/** Opcode 0x0f 0x95. */
8443FNIEMOP_DEF(iemOp_setne_Eb)
8444{
8445 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8446 IEMOP_HLP_MIN_386();
8447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8448
8449 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8450 * any way. AMD says it's "unused", whatever that means. We're
8451 * ignoring for now. */
8452 if (IEM_IS_MODRM_REG_MODE(bRm))
8453 {
8454 /* register target */
8455 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8458 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8459 } IEM_MC_ELSE() {
8460 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8461 } IEM_MC_ENDIF();
8462 IEM_MC_ADVANCE_RIP_AND_FINISH();
8463 IEM_MC_END();
8464 }
8465 else
8466 {
8467 /* memory target */
8468 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8472 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8473 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8474 } IEM_MC_ELSE() {
8475 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8476 } IEM_MC_ENDIF();
8477 IEM_MC_ADVANCE_RIP_AND_FINISH();
8478 IEM_MC_END();
8479 }
8480}
8481
8482
8483/** Opcode 0x0f 0x96. */
8484FNIEMOP_DEF(iemOp_setbe_Eb)
8485{
8486 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8487 IEMOP_HLP_MIN_386();
8488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8489
8490 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8491 * any way. AMD says it's "unused", whatever that means. We're
8492 * ignoring for now. */
8493 if (IEM_IS_MODRM_REG_MODE(bRm))
8494 {
8495 /* register target */
8496 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8498 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8499 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8500 } IEM_MC_ELSE() {
8501 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8502 } IEM_MC_ENDIF();
8503 IEM_MC_ADVANCE_RIP_AND_FINISH();
8504 IEM_MC_END();
8505 }
8506 else
8507 {
8508 /* memory target */
8509 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8513 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8514 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8515 } IEM_MC_ELSE() {
8516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8517 } IEM_MC_ENDIF();
8518 IEM_MC_ADVANCE_RIP_AND_FINISH();
8519 IEM_MC_END();
8520 }
8521}
8522
8523
8524/** Opcode 0x0f 0x97. */
8525FNIEMOP_DEF(iemOp_setnbe_Eb)
8526{
8527 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8528 IEMOP_HLP_MIN_386();
8529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8530
8531 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8532 * any way. AMD says it's "unused", whatever that means. We're
8533 * ignoring for now. */
8534 if (IEM_IS_MODRM_REG_MODE(bRm))
8535 {
8536 /* register target */
8537 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8539 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8540 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8541 } IEM_MC_ELSE() {
8542 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8543 } IEM_MC_ENDIF();
8544 IEM_MC_ADVANCE_RIP_AND_FINISH();
8545 IEM_MC_END();
8546 }
8547 else
8548 {
8549 /* memory target */
8550 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8554 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8555 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8556 } IEM_MC_ELSE() {
8557 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8558 } IEM_MC_ENDIF();
8559 IEM_MC_ADVANCE_RIP_AND_FINISH();
8560 IEM_MC_END();
8561 }
8562}
8563
8564
8565/** Opcode 0x0f 0x98. */
8566FNIEMOP_DEF(iemOp_sets_Eb)
8567{
8568 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8569 IEMOP_HLP_MIN_386();
8570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8571
8572 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8573 * any way. AMD says it's "unused", whatever that means. We're
8574 * ignoring for now. */
8575 if (IEM_IS_MODRM_REG_MODE(bRm))
8576 {
8577 /* register target */
8578 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8581 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8582 } IEM_MC_ELSE() {
8583 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8584 } IEM_MC_ENDIF();
8585 IEM_MC_ADVANCE_RIP_AND_FINISH();
8586 IEM_MC_END();
8587 }
8588 else
8589 {
8590 /* memory target */
8591 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8596 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8597 } IEM_MC_ELSE() {
8598 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8599 } IEM_MC_ENDIF();
8600 IEM_MC_ADVANCE_RIP_AND_FINISH();
8601 IEM_MC_END();
8602 }
8603}
8604
8605
8606/** Opcode 0x0f 0x99. */
8607FNIEMOP_DEF(iemOp_setns_Eb)
8608{
8609 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8610 IEMOP_HLP_MIN_386();
8611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8612
8613 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8614 * any way. AMD says it's "unused", whatever that means. We're
8615 * ignoring for now. */
8616 if (IEM_IS_MODRM_REG_MODE(bRm))
8617 {
8618 /* register target */
8619 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8621 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8622 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8623 } IEM_MC_ELSE() {
8624 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8625 } IEM_MC_ENDIF();
8626 IEM_MC_ADVANCE_RIP_AND_FINISH();
8627 IEM_MC_END();
8628 }
8629 else
8630 {
8631 /* memory target */
8632 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8637 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8638 } IEM_MC_ELSE() {
8639 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8640 } IEM_MC_ENDIF();
8641 IEM_MC_ADVANCE_RIP_AND_FINISH();
8642 IEM_MC_END();
8643 }
8644}
8645
8646
8647/** Opcode 0x0f 0x9a. */
8648FNIEMOP_DEF(iemOp_setp_Eb)
8649{
8650 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8651 IEMOP_HLP_MIN_386();
8652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8653
8654 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8655 * any way. AMD says it's "unused", whatever that means. We're
8656 * ignoring for now. */
8657 if (IEM_IS_MODRM_REG_MODE(bRm))
8658 {
8659 /* register target */
8660 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8662 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8663 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8664 } IEM_MC_ELSE() {
8665 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8666 } IEM_MC_ENDIF();
8667 IEM_MC_ADVANCE_RIP_AND_FINISH();
8668 IEM_MC_END();
8669 }
8670 else
8671 {
8672 /* memory target */
8673 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8677 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8678 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8679 } IEM_MC_ELSE() {
8680 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8681 } IEM_MC_ENDIF();
8682 IEM_MC_ADVANCE_RIP_AND_FINISH();
8683 IEM_MC_END();
8684 }
8685}
8686
8687
8688/** Opcode 0x0f 0x9b. */
8689FNIEMOP_DEF(iemOp_setnp_Eb)
8690{
8691 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8692 IEMOP_HLP_MIN_386();
8693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8694
8695 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8696 * any way. AMD says it's "unused", whatever that means. We're
8697 * ignoring for now. */
8698 if (IEM_IS_MODRM_REG_MODE(bRm))
8699 {
8700 /* register target */
8701 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8704 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8705 } IEM_MC_ELSE() {
8706 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8707 } IEM_MC_ENDIF();
8708 IEM_MC_ADVANCE_RIP_AND_FINISH();
8709 IEM_MC_END();
8710 }
8711 else
8712 {
8713 /* memory target */
8714 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8719 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8720 } IEM_MC_ELSE() {
8721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8722 } IEM_MC_ENDIF();
8723 IEM_MC_ADVANCE_RIP_AND_FINISH();
8724 IEM_MC_END();
8725 }
8726}
8727
8728
8729/** Opcode 0x0f 0x9c. */
8730FNIEMOP_DEF(iemOp_setl_Eb)
8731{
8732 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8733 IEMOP_HLP_MIN_386();
8734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8735
8736 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8737 * any way. AMD says it's "unused", whatever that means. We're
8738 * ignoring for now. */
8739 if (IEM_IS_MODRM_REG_MODE(bRm))
8740 {
8741 /* register target */
8742 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8744 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8745 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8746 } IEM_MC_ELSE() {
8747 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8748 } IEM_MC_ENDIF();
8749 IEM_MC_ADVANCE_RIP_AND_FINISH();
8750 IEM_MC_END();
8751 }
8752 else
8753 {
8754 /* memory target */
8755 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8759 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8760 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8761 } IEM_MC_ELSE() {
8762 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8763 } IEM_MC_ENDIF();
8764 IEM_MC_ADVANCE_RIP_AND_FINISH();
8765 IEM_MC_END();
8766 }
8767}
8768
8769
8770/** Opcode 0x0f 0x9d. */
8771FNIEMOP_DEF(iemOp_setnl_Eb)
8772{
8773 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8774 IEMOP_HLP_MIN_386();
8775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8776
8777 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8778 * any way. AMD says it's "unused", whatever that means. We're
8779 * ignoring it for now. */
8780 if (IEM_IS_MODRM_REG_MODE(bRm))
8781 {
8782 /* register target */
8783 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8785 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8786 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8787 } IEM_MC_ELSE() {
8788 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8789 } IEM_MC_ENDIF();
8790 IEM_MC_ADVANCE_RIP_AND_FINISH();
8791 IEM_MC_END();
8792 }
8793 else
8794 {
8795 /* memory target */
8796 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8800 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8801 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8802 } IEM_MC_ELSE() {
8803 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8804 } IEM_MC_ENDIF();
8805 IEM_MC_ADVANCE_RIP_AND_FINISH();
8806 IEM_MC_END();
8807 }
8808}
8809
8810
8811/** Opcode 0x0f 0x9e. */
8812FNIEMOP_DEF(iemOp_setle_Eb)
8813{
8814 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8815 IEMOP_HLP_MIN_386();
8816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8817
8818 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8819 * any way. AMD says it's "unused", whatever that means. We're
8820 * ignoring it for now. */
8821 if (IEM_IS_MODRM_REG_MODE(bRm))
8822 {
8823 /* register target */
8824 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8826 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8827 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8828 } IEM_MC_ELSE() {
8829 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8830 } IEM_MC_ENDIF();
8831 IEM_MC_ADVANCE_RIP_AND_FINISH();
8832 IEM_MC_END();
8833 }
8834 else
8835 {
8836 /* memory target */
8837 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8841 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8842 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8843 } IEM_MC_ELSE() {
8844 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8845 } IEM_MC_ENDIF();
8846 IEM_MC_ADVANCE_RIP_AND_FINISH();
8847 IEM_MC_END();
8848 }
8849}
8850
8851
8852/** Opcode 0x0f 0x9f. */
8853FNIEMOP_DEF(iemOp_setnle_Eb)
8854{
8855 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8856 IEMOP_HLP_MIN_386();
8857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8858
8859 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8860 * any way. AMD says it's "unused", whatever that means. We're
8861 * ignoring it for now. */
8862 if (IEM_IS_MODRM_REG_MODE(bRm))
8863 {
8864 /* register target */
8865 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8867 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8868 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8869 } IEM_MC_ELSE() {
8870 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8871 } IEM_MC_ENDIF();
8872 IEM_MC_ADVANCE_RIP_AND_FINISH();
8873 IEM_MC_END();
8874 }
8875 else
8876 {
8877 /* memory target */
8878 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8882 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8883 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8884 } IEM_MC_ELSE() {
8885 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8886 } IEM_MC_ENDIF();
8887 IEM_MC_ADVANCE_RIP_AND_FINISH();
8888 IEM_MC_END();
8889 }
8890}
8891
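/*
 * Quick reference for the signed SETcc conditions implemented above:
 *      setl  / setnge:  SF != OF
 *      setnl / setge:   SF == OF
 *      setle / setng:   ZF set, or SF != OF
 *      setnle/ setg:    ZF clear and SF == OF
 * E.g. after 'cmp eax, ebx' with eax=-1 and ebx=1 we get SF=1, OF=0 and
 * ZF=0, so setl stores 1 while setnle stores 0.
 */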
8892
8893/** Opcode 0x0f 0xa0. */
8894FNIEMOP_DEF(iemOp_push_fs)
8895{
8896 IEMOP_MNEMONIC(push_fs, "push fs");
8897 IEMOP_HLP_MIN_386();
8898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8899 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8900}
8901
8902
8903/** Opcode 0x0f 0xa1. */
8904FNIEMOP_DEF(iemOp_pop_fs)
8905{
8906 IEMOP_MNEMONIC(pop_fs, "pop fs");
8907 IEMOP_HLP_MIN_386();
8908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8909 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8910}
8911
8912
8913/** Opcode 0x0f 0xa2. */
8914FNIEMOP_DEF(iemOp_cpuid)
8915{
8916 IEMOP_MNEMONIC(cpuid, "cpuid");
8917 IEMOP_HLP_MIN_486(); /* not available on all 486s. */
8918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8919 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8920}
8921
8922
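/*
 * Note: Unlike the register forms, the memory forms below do not simply
 * mask the bit offset; the macro bodies split the (signed) offset from the
 * source register into an element index and a bit index.  For the 16-bit
 * case that amounts to roughly:
 *      i16AddrAdj   = (int16_t)u16Src >> 4;   // signed word index (SAR by 4)
 *      GCPtrEffDst += i16AddrAdj * 2;         // byte adjustment  (SHL by 1)
 *      u16Src      &= 0x0f;                   // bit within that word
 * So a bit offset of 0x1234 accesses bit 4 of the word at +0x246, and an
 * offset of -1 (0xffff) accesses bit 15 of the word at -2.
 */
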
8923/**
8924 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8925 * iemOp_bts_Ev_Gv.
8926 */
8927
8928#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8930 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8931 \
8932 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8933 { \
8934 /* register destination. */ \
8935 switch (pVCpu->iem.s.enmEffOpSize) \
8936 { \
8937 case IEMMODE_16BIT: \
8938 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
8939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8940 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8941 IEM_MC_ARG(uint16_t, u16Src, 1); \
8942 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8943 \
8944 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8945 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8946 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8947 IEM_MC_REF_EFLAGS(pEFlags); \
8948 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8949 \
8950 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8951 IEM_MC_END(); \
8952 break; \
8953 \
8954 case IEMMODE_32BIT: \
8955 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
8956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8957 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8958 IEM_MC_ARG(uint32_t, u32Src, 1); \
8959 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8960 \
8961 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8962 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8963 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8964 IEM_MC_REF_EFLAGS(pEFlags); \
8965 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8966 \
8967 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
8968 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8969 IEM_MC_END(); \
8970 break; \
8971 \
8972 case IEMMODE_64BIT: \
8973 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
8974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8975 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8976 IEM_MC_ARG(uint64_t, u64Src, 1); \
8977 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8978 \
8979 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8980 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8981 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8982 IEM_MC_REF_EFLAGS(pEFlags); \
8983 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8984 \
8985 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8986 IEM_MC_END(); \
8987 break; \
8988 \
8989 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8990 } \
8991 } \
8992 else \
8993 { \
8994 /* memory destination. */ \
8995 /** @todo test negative bit offsets! */ \
8996 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
8997 { \
8998 switch (pVCpu->iem.s.enmEffOpSize) \
8999 { \
9000 case IEMMODE_16BIT: \
9001 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9002 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9003 IEM_MC_ARG(uint16_t, u16Src, 1); \
9004 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9006 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9007 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9008 \
9009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9010 IEMOP_HLP_DONE_DECODING(); \
9011 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9012 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9013 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9014 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9015 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9016 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9017 IEM_MC_FETCH_EFLAGS(EFlags); \
9018 \
9019 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9020 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9021 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9022 \
9023 IEM_MC_COMMIT_EFLAGS(EFlags); \
9024 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9025 IEM_MC_END(); \
9026 break; \
9027 \
9028 case IEMMODE_32BIT: \
9029 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9030 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9031 IEM_MC_ARG(uint32_t, u32Src, 1); \
9032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9034 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9035 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9036 \
9037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9038 IEMOP_HLP_DONE_DECODING(); \
9039 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9040 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9041 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9042 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9043 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9044 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9045 IEM_MC_FETCH_EFLAGS(EFlags); \
9046 \
9047 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9048 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9049 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9050 \
9051 IEM_MC_COMMIT_EFLAGS(EFlags); \
9052 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9053 IEM_MC_END(); \
9054 break; \
9055 \
9056 case IEMMODE_64BIT: \
9057 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT); \
9058 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9059 IEM_MC_ARG(uint64_t, u64Src, 1); \
9060 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9062 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9063 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9064 \
9065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9066 IEMOP_HLP_DONE_DECODING(); \
9067 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9068 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9069 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9070 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9071 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9072 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9073 IEM_MC_FETCH_EFLAGS(EFlags); \
9074 \
9075 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9076 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9077 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9078 \
9079 IEM_MC_COMMIT_EFLAGS(EFlags); \
9080 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9081 IEM_MC_END(); \
9082 break; \
9083 \
9084 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9085 } \
9086 } \
9087 else \
9088 { \
9089 (void)0
9090/* Separate macro to work around a parsing issue in IEMAllInstPython.py. */
9091#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9092 switch (pVCpu->iem.s.enmEffOpSize) \
9093 { \
9094 case IEMMODE_16BIT: \
9095 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9096 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9097 IEM_MC_ARG(uint16_t, u16Src, 1); \
9098 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9100 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9102 \
9103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9104 IEMOP_HLP_DONE_DECODING(); \
9105 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9106 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9107 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9108 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9109 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9110 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9111 IEM_MC_FETCH_EFLAGS(EFlags); \
9112 \
9113 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9114 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9115 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9116 \
9117 IEM_MC_COMMIT_EFLAGS(EFlags); \
9118 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9119 IEM_MC_END(); \
9120 break; \
9121 \
9122 case IEMMODE_32BIT: \
9123 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9124 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9125 IEM_MC_ARG(uint32_t, u32Src, 1); \
9126 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9128 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9129 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9130 \
9131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9132 IEMOP_HLP_DONE_DECODING(); \
9133 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9134 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9135 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9136 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9137 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9138 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9139 IEM_MC_FETCH_EFLAGS(EFlags); \
9140 \
9141 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9142 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9143 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9144 \
9145 IEM_MC_COMMIT_EFLAGS(EFlags); \
9146 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9147 IEM_MC_END(); \
9148 break; \
9149 \
9150 case IEMMODE_64BIT: \
9151 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT); \
9152 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9153 IEM_MC_ARG(uint64_t, u64Src, 1); \
9154 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9156 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9157 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9158 \
9159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9160 IEMOP_HLP_DONE_DECODING(); \
9161 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9162 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9163 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9164 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9165 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9166 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9167 IEM_MC_FETCH_EFLAGS(EFlags); \
9168 \
9169 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9170 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9171 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9172 \
9173 IEM_MC_COMMIT_EFLAGS(EFlags); \
9174 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9175 IEM_MC_END(); \
9176 break; \
9177 \
9178 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9179 } \
9180 } \
9181 } \
9182 (void)0
9183
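/*
 * The two halves are meant to be used back to back, the second one
 * supplying the lock prefixed code paths, e.g. (see iemOp_bts_Ev_Gv
 * further down):
 *      IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
 *      IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
 */
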
9184/* Read-only version (bt). */
9185#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9187 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9188 \
9189 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9190 { \
9191 /* register destination. */ \
9192 switch (pVCpu->iem.s.enmEffOpSize) \
9193 { \
9194 case IEMMODE_16BIT: \
9195 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
9196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9197 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9198 IEM_MC_ARG(uint16_t, u16Src, 1); \
9199 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9200 \
9201 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9202 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9203 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9204 IEM_MC_REF_EFLAGS(pEFlags); \
9205 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9206 \
9207 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9208 IEM_MC_END(); \
9209 break; \
9210 \
9211 case IEMMODE_32BIT: \
9212 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
9213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9214 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9215 IEM_MC_ARG(uint32_t, u32Src, 1); \
9216 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9217 \
9218 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9219 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9220 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9221 IEM_MC_REF_EFLAGS(pEFlags); \
9222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9223 \
9224 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9225 IEM_MC_END(); \
9226 break; \
9227 \
9228 case IEMMODE_64BIT: \
9229 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
9230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9231 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9232 IEM_MC_ARG(uint64_t, u64Src, 1); \
9233 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9234 \
9235 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9236 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9237 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9238 IEM_MC_REF_EFLAGS(pEFlags); \
9239 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9240 \
9241 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9242 IEM_MC_END(); \
9243 break; \
9244 \
9245 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9246 } \
9247 } \
9248 else \
9249 { \
9250 /* memory destination. */ \
9251 /** @todo test negative bit offsets! */ \
9252 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9253 { \
9254 switch (pVCpu->iem.s.enmEffOpSize) \
9255 { \
9256 case IEMMODE_16BIT: \
9257 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9258 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9259 IEM_MC_ARG(uint16_t, u16Src, 1); \
9260 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9262 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9263 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9264 \
9265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9266 IEMOP_HLP_DONE_DECODING(); \
9267 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9268 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9269 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9270 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9271 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9272 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9273 IEM_MC_FETCH_EFLAGS(EFlags); \
9274 \
9275 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9276 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9277 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
9278 \
9279 IEM_MC_COMMIT_EFLAGS(EFlags); \
9280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9281 IEM_MC_END(); \
9282 break; \
9283 \
9284 case IEMMODE_32BIT: \
9285 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9286 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9287 IEM_MC_ARG(uint32_t, u32Src, 1); \
9288 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9290 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9291 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9292 \
9293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9294 IEMOP_HLP_DONE_DECODING(); \
9295 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9296 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9297 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9298 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9299 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9300 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9301 IEM_MC_FETCH_EFLAGS(EFlags); \
9302 \
9303 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9304 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9305 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
9306 \
9307 IEM_MC_COMMIT_EFLAGS(EFlags); \
9308 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9309 IEM_MC_END(); \
9310 break; \
9311 \
9312 case IEMMODE_64BIT: \
9313 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT); \
9314 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9315 IEM_MC_ARG(uint64_t, u64Src, 1); \
9316 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9318 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9319 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9320 \
9321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9322 IEMOP_HLP_DONE_DECODING(); \
9323 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9324 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9325 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9326 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9327 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9328 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9329 IEM_MC_FETCH_EFLAGS(EFlags); \
9330 \
9331 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9332 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9333 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
9334 \
9335 IEM_MC_COMMIT_EFLAGS(EFlags); \
9336 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9337 IEM_MC_END(); \
9338 break; \
9339 \
9340 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9341 } \
9342 } \
9343 else \
9344 { \
9345 IEMOP_HLP_DONE_DECODING(); \
9346 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9347 } \
9348 } \
9349 (void)0
9350
9351
9352/** Opcode 0x0f 0xa3. */
9353FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9354{
9355 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9356 IEMOP_HLP_MIN_386();
9357 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9358}
9359
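/*
 * Note: bt only reads the destination, so the read-only body above rejects
 * the lock prefix (IEMOP_RAISE_INVALID_LOCK_PREFIX_RET) rather than
 * providing locked variants the way bts/btr/btc do.
 */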
9360
9361/**
9362 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9363 */
9364FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9365{
9366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9367 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9368
9369 if (IEM_IS_MODRM_REG_MODE(bRm))
9370 {
9371 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9372
9373 switch (pVCpu->iem.s.enmEffOpSize)
9374 {
9375 case IEMMODE_16BIT:
9376 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9378 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9379 IEM_MC_ARG(uint16_t, u16Src, 1);
9380 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9381 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9382
9383 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9384 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9385 IEM_MC_REF_EFLAGS(pEFlags);
9386 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9387
9388 IEM_MC_ADVANCE_RIP_AND_FINISH();
9389 IEM_MC_END();
9390 break;
9391
9392 case IEMMODE_32BIT:
9393 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9395 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9396 IEM_MC_ARG(uint32_t, u32Src, 1);
9397 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9398 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9399
9400 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9401 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9402 IEM_MC_REF_EFLAGS(pEFlags);
9403 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9404
9405 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9406 IEM_MC_ADVANCE_RIP_AND_FINISH();
9407 IEM_MC_END();
9408 break;
9409
9410 case IEMMODE_64BIT:
9411 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
9412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9414 IEM_MC_ARG(uint64_t, u64Src, 1);
9415 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9416 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9417
9418 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9419 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9420 IEM_MC_REF_EFLAGS(pEFlags);
9421 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9422
9423 IEM_MC_ADVANCE_RIP_AND_FINISH();
9424 IEM_MC_END();
9425 break;
9426
9427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9428 }
9429 }
9430 else
9431 {
9432 switch (pVCpu->iem.s.enmEffOpSize)
9433 {
9434 case IEMMODE_16BIT:
9435 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9436 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9437 IEM_MC_ARG(uint16_t, u16Src, 1);
9438 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9439 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9441 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9442
9443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9444 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9445 IEM_MC_ASSIGN(cShiftArg, cShift);
9446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9447 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9448 IEM_MC_FETCH_EFLAGS(EFlags);
9449 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9450 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9451
9452 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9453 IEM_MC_COMMIT_EFLAGS(EFlags);
9454 IEM_MC_ADVANCE_RIP_AND_FINISH();
9455 IEM_MC_END();
9456 break;
9457
9458 case IEMMODE_32BIT:
9459 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9460 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9461 IEM_MC_ARG(uint32_t, u32Src, 1);
9462 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9463 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9465 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9466
9467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9468 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9469 IEM_MC_ASSIGN(cShiftArg, cShift);
9470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9471 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9472 IEM_MC_FETCH_EFLAGS(EFlags);
9473 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9474 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9475
9476 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9477 IEM_MC_COMMIT_EFLAGS(EFlags);
9478 IEM_MC_ADVANCE_RIP_AND_FINISH();
9479 IEM_MC_END();
9480 break;
9481
9482 case IEMMODE_64BIT:
9483 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
9484 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9485 IEM_MC_ARG(uint64_t, u64Src, 1);
9486 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9487 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9489 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9490
9491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9493 IEM_MC_ASSIGN(cShiftArg, cShift);
9494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9495 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9496 IEM_MC_FETCH_EFLAGS(EFlags);
9497 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9498 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9499
9500 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9501 IEM_MC_COMMIT_EFLAGS(EFlags);
9502 IEM_MC_ADVANCE_RIP_AND_FINISH();
9503 IEM_MC_END();
9504 break;
9505
9506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9507 }
9508 }
9509}
9510
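/*
 * Note: The memory forms above pass 1 as the immediate byte count to
 * IEM_MC_CALC_RM_EFF_ADDR because the shift count byte (Ib) still follows
 * the ModR/M encoding; the CL based worker below passes 0 since no
 * immediate follows there.
 */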
9511
9512/**
9513 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9514 */
9515FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9516{
9517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9518 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9519
9520 if (IEM_IS_MODRM_REG_MODE(bRm))
9521 {
9522 switch (pVCpu->iem.s.enmEffOpSize)
9523 {
9524 case IEMMODE_16BIT:
9525 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9527 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9528 IEM_MC_ARG(uint16_t, u16Src, 1);
9529 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9530 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9531
9532 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9533 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9534 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9535 IEM_MC_REF_EFLAGS(pEFlags);
9536 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9537
9538 IEM_MC_ADVANCE_RIP_AND_FINISH();
9539 IEM_MC_END();
9540 break;
9541
9542 case IEMMODE_32BIT:
9543 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9545 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9546 IEM_MC_ARG(uint32_t, u32Src, 1);
9547 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9548 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9549
9550 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9551 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9552 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9553 IEM_MC_REF_EFLAGS(pEFlags);
9554 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9555
9556 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9557 IEM_MC_ADVANCE_RIP_AND_FINISH();
9558 IEM_MC_END();
9559 break;
9560
9561 case IEMMODE_64BIT:
9562 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
9563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9564 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9565 IEM_MC_ARG(uint64_t, u64Src, 1);
9566 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9567 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9568
9569 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9570 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9571 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9572 IEM_MC_REF_EFLAGS(pEFlags);
9573 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9574
9575 IEM_MC_ADVANCE_RIP_AND_FINISH();
9576 IEM_MC_END();
9577 break;
9578
9579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9580 }
9581 }
9582 else
9583 {
9584 switch (pVCpu->iem.s.enmEffOpSize)
9585 {
9586 case IEMMODE_16BIT:
9587 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9588 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9589 IEM_MC_ARG(uint16_t, u16Src, 1);
9590 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9591 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9593 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9594
9595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9597 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9598 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9599 IEM_MC_FETCH_EFLAGS(EFlags);
9600 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9601 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9602
9603 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9604 IEM_MC_COMMIT_EFLAGS(EFlags);
9605 IEM_MC_ADVANCE_RIP_AND_FINISH();
9606 IEM_MC_END();
9607 break;
9608
9609 case IEMMODE_32BIT:
9610 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9611 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9612 IEM_MC_ARG(uint32_t, u32Src, 1);
9613 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9614 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9616 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9617
9618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9620 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9621 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9622 IEM_MC_FETCH_EFLAGS(EFlags);
9623 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9624 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9625
9626 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9627 IEM_MC_COMMIT_EFLAGS(EFlags);
9628 IEM_MC_ADVANCE_RIP_AND_FINISH();
9629 IEM_MC_END();
9630 break;
9631
9632 case IEMMODE_64BIT:
9633 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
9634 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9635 IEM_MC_ARG(uint64_t, u64Src, 1);
9636 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9637 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9639 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9640
9641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9643 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9644 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9645 IEM_MC_FETCH_EFLAGS(EFlags);
9646 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9647 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9648
9649 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9650 IEM_MC_COMMIT_EFLAGS(EFlags);
9651 IEM_MC_ADVANCE_RIP_AND_FINISH();
9652 IEM_MC_END();
9653 break;
9654
9655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9656 }
9657 }
9658}
9659
9660
9661
9662/** Opcode 0x0f 0xa4. */
9663FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9664{
9665 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9666 IEMOP_HLP_MIN_386();
9667 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9668}
9669
9670
9671/** Opcode 0x0f 0xa5. */
9672FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9673{
9674 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9675 IEMOP_HLP_MIN_386();
9676 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9677}
9678
9679
9680/** Opcode 0x0f 0xa8. */
9681FNIEMOP_DEF(iemOp_push_gs)
9682{
9683 IEMOP_MNEMONIC(push_gs, "push gs");
9684 IEMOP_HLP_MIN_386();
9685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9686 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9687}
9688
9689
9690/** Opcode 0x0f 0xa9. */
9691FNIEMOP_DEF(iemOp_pop_gs)
9692{
9693 IEMOP_MNEMONIC(pop_gs, "pop gs");
9694 IEMOP_HLP_MIN_386();
9695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9696 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9697}
9698
9699
9700/** Opcode 0x0f 0xaa. */
9701FNIEMOP_DEF(iemOp_rsm)
9702{
9703 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9704 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9706 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
9707 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9708 iemCImpl_rsm);
9709}
9710
9711
9712
9713/** Opcode 0x0f 0xab. */
9714FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9715{
9716 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9717 IEMOP_HLP_MIN_386();
9718 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9719 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9720}
9721
9722
9723/** Opcode 0x0f 0xac. */
9724FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9725{
9726 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9727 IEMOP_HLP_MIN_386();
9728 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9729}
9730
9731
9732/** Opcode 0x0f 0xad. */
9733FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9734{
9735 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9736 IEMOP_HLP_MIN_386();
9737 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9738}
9739
9740
9741/** Opcode 0x0f 0xae mem/0. */
9742FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9743{
9744 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9745 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9746 IEMOP_RAISE_INVALID_OPCODE_RET();
9747
9748 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II);
9749 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9750 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9751 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9754 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9755 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9756 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9757 IEM_MC_END();
9758}
9759
9760
9761/** Opcode 0x0f 0xae mem/1. */
9762FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9763{
9764 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9765 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9766 IEMOP_RAISE_INVALID_OPCODE_RET();
9767
9768 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II);
9769 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9770 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9771 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9774 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9775 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9776 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9777 IEM_MC_END();
9778}
9779
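/*
 * Note the state actualization pattern: fxsave only reads the guest
 * FPU/SSE state and thus uses IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ, while
 * fxrstor replaces the state and uses IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE.
 * The same distinction recurs below: ldmxcsr and xrstor change state,
 * stmxcsr and xsave only read it.
 */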
9780
9781/**
9782 * @opmaps grp15
9783 * @opcode !11/2
9784 * @oppfx none
9785 * @opcpuid sse
9786 * @opgroup og_sse_mxcsrsm
9787 * @opxcpttype 5
9788 * @optest op1=0 -> mxcsr=0
9789 * @optest op1=0x2083 -> mxcsr=0x2083
9790 * @optest op1=0xfffffffe -> value.xcpt=0xd
9791 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9792 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9793 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9794 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9795 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9796 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9797 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9798 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9799 */
9800FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9801{
9802 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9803 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9804 IEMOP_RAISE_INVALID_OPCODE_RET();
9805
9806 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II);
9807 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9808 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9811 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9812 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9813 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9814 IEM_MC_END();
9815}
9816
9817
9818/**
9819 * @opmaps grp15
9820 * @opcode !11/3
9821 * @oppfx none
9822 * @opcpuid sse
9823 * @opgroup og_sse_mxcsrsm
9824 * @opxcpttype 5
9825 * @optest mxcsr=0 -> op1=0
9826 * @optest mxcsr=0x2083 -> op1=0x2083
9827 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9828 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9829 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9830 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9831 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9832 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9833 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9834 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9835 */
9836FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9837{
9838 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9839 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9840 IEMOP_RAISE_INVALID_OPCODE_RET();
9841
9842 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II);
9843 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9844 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9848 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9849 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9850 IEM_MC_END();
9851}
9852
9853
9854/**
9855 * @opmaps grp15
9856 * @opcode !11/4
9857 * @oppfx none
9858 * @opcpuid xsave
9859 * @opgroup og_system
9860 * @opxcpttype none
9861 */
9862FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9863{
9864 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9865 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9866 IEMOP_RAISE_INVALID_OPCODE_RET();
9867
9868 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE);
9869 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9870 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9871 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9874 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9875 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9876 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9877 IEM_MC_END();
9878}
9879
9880
9881/**
9882 * @opmaps grp15
9883 * @opcode !11/5
9884 * @oppfx none
9885 * @opcpuid xsave
9886 * @opgroup og_system
9887 * @opxcpttype none
9888 */
9889FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9890{
9891 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9892 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9893 IEMOP_RAISE_INVALID_OPCODE_RET();
9894
9895 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE);
9896 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9897 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9898 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9901 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9902 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9903 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9904 IEM_MC_END();
9905}
9906
9907/** Opcode 0x0f 0xae mem/6. */
9908FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9909
9910/**
9911 * @opmaps grp15
9912 * @opcode !11/7
9913 * @oppfx none
9914 * @opcpuid clfsh
9915 * @opgroup og_cachectl
9916 * @optest op1=1 ->
9917 */
9918FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9919{
9920 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9922 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9923
9924 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
9925 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9926 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9929 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9930 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9931 IEM_MC_END();
9932}
9933
9934/**
9935 * @opmaps grp15
9936 * @opcode !11/7
9937 * @oppfx 0x66
9938 * @opcpuid clflushopt
9939 * @opgroup og_cachectl
9940 * @optest op1=1 ->
9941 */
9942FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9943{
9944 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9945 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9946 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9947
9948 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
9949 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9950 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9953 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9954 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9955 IEM_MC_END();
9956}
9957
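/*
 * Note: clflush and clflushopt share the iemCImpl_clflush_clflushopt
 * worker above; the two encodings differ only in the CPUID feature bit
 * gating them and in the 0x66 prefix selecting clflushopt.
 */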
9958
9959/** Opcode 0x0f 0xae 11b/5. */
9960FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9961{
9962 RT_NOREF_PV(bRm);
9963 IEMOP_MNEMONIC(lfence, "lfence");
9964 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
9965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9966#ifdef RT_ARCH_ARM64
9967 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9968#else
9969 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9970 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9971 else
9972 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9973#endif
9974 IEM_MC_ADVANCE_RIP_AND_FINISH();
9975 IEM_MC_END();
9976}
9977
9978
9979/** Opcode 0x0f 0xae 11b/6. */
9980FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9981{
9982 RT_NOREF_PV(bRm);
9983 IEMOP_MNEMONIC(mfence, "mfence");
9984 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
9985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9986#ifdef RT_ARCH_ARM64
9987 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9988#else
9989 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9990 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9991 else
9992 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9993#endif
9994 IEM_MC_ADVANCE_RIP_AND_FINISH();
9995 IEM_MC_END();
9996}
9997
9998
9999/** Opcode 0x0f 0xae 11b/7. */
10000FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
10001{
10002 RT_NOREF_PV(bRm);
10003 IEMOP_MNEMONIC(sfence, "sfence");
10004 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
10005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10006#ifdef RT_ARCH_ARM64
10007 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10008#else
10009 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10010 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10011 else
10012 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10013#endif
10014 IEM_MC_ADVANCE_RIP_AND_FINISH();
10015 IEM_MC_END();
10016}
10017
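/*
 * Note: On non-ARM64 hosts the three fences above only use the dedicated
 * lfence/mfence/sfence helpers when the host CPU itself has SSE2;
 * otherwise they fall back to iemAImpl_alt_mem_fence, an alternative
 * memory fence implementation for older hosts.
 */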
10018
10019/** Opcode 0xf3 0x0f 0xae 11b/0. */
10020FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10021{
10022 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10023 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10024 {
10025 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10027 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10028 IEM_MC_ARG(uint64_t, u64Dst, 0);
10029 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10030 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10031 IEM_MC_ADVANCE_RIP_AND_FINISH();
10032 IEM_MC_END();
10033 }
10034 else
10035 {
10036 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10038 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10039 IEM_MC_ARG(uint32_t, u32Dst, 0);
10040 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10041 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10042 IEM_MC_ADVANCE_RIP_AND_FINISH();
10043 IEM_MC_END();
10044 }
10045}
10046
10047
10048/** Opcode 0xf3 0x0f 0xae 11b/1. */
10049FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10050{
10051 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10052 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10053 {
10054 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10056 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10057 IEM_MC_ARG(uint64_t, u64Dst, 0);
10058 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10059 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10060 IEM_MC_ADVANCE_RIP_AND_FINISH();
10061 IEM_MC_END();
10062 }
10063 else
10064 {
10065 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10067 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10068 IEM_MC_ARG(uint32_t, u32Dst, 0);
10069 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10070 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10071 IEM_MC_ADVANCE_RIP_AND_FINISH();
10072 IEM_MC_END();
10073 }
10074}
10075
10076
10077/** Opcode 0xf3 0x0f 0xae 11b/2. */
10078FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10079{
10080 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10081 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10082 {
10083 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10085 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10086 IEM_MC_ARG(uint64_t, u64Dst, 0);
10087 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10088 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10089 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10090 IEM_MC_ADVANCE_RIP_AND_FINISH();
10091 IEM_MC_END();
10092 }
10093 else
10094 {
10095 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10097 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10098 IEM_MC_ARG(uint32_t, u32Dst, 0);
10099 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10100 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10101 IEM_MC_ADVANCE_RIP_AND_FINISH();
10102 IEM_MC_END();
10103 }
10104}
10105
10106
10107/** Opcode 0xf3 0x0f 0xae 11b/3. */
10108FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10109{
10110 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10111 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10112 {
10113 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10115 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10116 IEM_MC_ARG(uint64_t, u64Dst, 0);
10117 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10118 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10119 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10120 IEM_MC_ADVANCE_RIP_AND_FINISH();
10121 IEM_MC_END();
10122 }
10123 else
10124 {
10125 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10127 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10128 IEM_MC_ARG(uint32_t, u32Dst, 0);
10129 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10130 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10131 IEM_MC_ADVANCE_RIP_AND_FINISH();
10132 IEM_MC_END();
10133 }
10134}
10135
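/*
 * Note: For the four rd/wr fs/gs base workers above the 64-bit mode and
 * CR4.FSGSBASE gating is left to IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT; the
 * enmEffOpSize check only selects between the REX.W (64-bit) and 32-bit
 * operand size forms.
 */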
10136
10137/**
10138 * Group 15 jump table for register variant.
10139 */
10140IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10141{ /* pfx: none, 066h, 0f3h, 0f2h */
10142 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10143 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10144 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10145 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10146 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10147 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10148 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10149 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10150};
10151AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10152
10153
10154/**
10155 * Group 15 jump table for memory variant.
10156 */
10157IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10158{ /* pfx: none, 066h, 0f3h, 0f2h */
10159 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10160 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10161 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10162 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10163 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10164 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10165 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10166 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10167};
10168AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10169
10170
10171/** Opcode 0x0f 0xae. */
10172FNIEMOP_DEF(iemOp_Grp15)
10173{
10174 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
10175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10176 if (IEM_IS_MODRM_REG_MODE(bRm))
10177 /* register, register */
10178 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10179 + pVCpu->iem.s.idxPrefix], bRm);
10180 /* memory, register */
10181 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10182 + pVCpu->iem.s.idxPrefix], bRm);
10183}
10184
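/*
 * Dispatch example: 0f ae e8 decodes to mod=11b, reg=5, rm=0, so with no
 * prefix the register table yields entry 5*4 + 0 = iemOp_Grp15_lfence,
 * while the same reg field with a memory operand selects
 * iemOp_Grp15_xrstor from the memory table.
 */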
10185
10186/** Opcode 0x0f 0xaf. */
10187FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10188{
10189 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10190 IEMOP_HLP_MIN_386();
10191 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10192 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10193 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10194}
10195
10196
10197/** Opcode 0x0f 0xb0. */
10198FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10199{
10200 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10201 IEMOP_HLP_MIN_486();
10202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10203
10204 if (IEM_IS_MODRM_REG_MODE(bRm))
10205 {
10206 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10207 IEMOP_HLP_DONE_DECODING();
10208 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10209 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10210 IEM_MC_ARG(uint8_t, u8Src, 2);
10211 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10212
10213 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10214 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10215 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10216 IEM_MC_REF_EFLAGS(pEFlags);
10217 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10218 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10219 else
10220 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10221
10222 IEM_MC_ADVANCE_RIP_AND_FINISH();
10223 IEM_MC_END();
10224 }
10225 else
10226 {
10227 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10228 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10229 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10230 IEM_MC_ARG(uint8_t, u8Src, 2);
10231 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10233 IEM_MC_LOCAL(uint8_t, u8Al);
10234 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10235
10236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10237 IEMOP_HLP_DONE_DECODING();
10238 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10239 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10240 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10241 IEM_MC_FETCH_EFLAGS(EFlags);
10242 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10243 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10244 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10245 else
10246 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10247
10248 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
10249 IEM_MC_COMMIT_EFLAGS(EFlags);
10250 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10251 IEM_MC_ADVANCE_RIP_AND_FINISH();
10252 IEM_MC_END();
10253 }
10254}
10255
10256/** Opcode 0x0f 0xb1. */
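/*
 * Note: The memory form above goes thru the u8Al local (and the pu8Al
 * reference to it) so the worker can update the accumulator copy before
 * the guest AL is touched; storing it back to AL unconditionally relies on
 * the worker leaving the local at the original AL value when the compare
 * succeeds.
 */
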
10257FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10258{
10259 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10260 IEMOP_HLP_MIN_486();
10261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10262
10263 if (IEM_IS_MODRM_REG_MODE(bRm))
10264 {
10265 switch (pVCpu->iem.s.enmEffOpSize)
10266 {
10267 case IEMMODE_16BIT:
10268 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10269 IEMOP_HLP_DONE_DECODING();
10270 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10271 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10272 IEM_MC_ARG(uint16_t, u16Src, 2);
10273 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10274
10275 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10276 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10277 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10278 IEM_MC_REF_EFLAGS(pEFlags);
10279 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10281 else
10282 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10283
10284 IEM_MC_ADVANCE_RIP_AND_FINISH();
10285 IEM_MC_END();
10286 break;
10287
10288 case IEMMODE_32BIT:
10289 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10290 IEMOP_HLP_DONE_DECODING();
10291 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10292 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10293 IEM_MC_ARG(uint32_t, u32Src, 2);
10294 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10295
10296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10298 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10299 IEM_MC_REF_EFLAGS(pEFlags);
10300 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10302 else
10303 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10304
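 /* In 64-bit mode a 32-bit write clears bits 63:32 of whichever register
    was actually written: the r/m destination when the compare succeeded
    (ZF=1), otherwise EAX. */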
10305 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10306 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10307 } IEM_MC_ELSE() {
10308 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10309 } IEM_MC_ENDIF();
10310
10311 IEM_MC_ADVANCE_RIP_AND_FINISH();
10312 IEM_MC_END();
10313 break;
10314
10315 case IEMMODE_64BIT:
10316 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
10317 IEMOP_HLP_DONE_DECODING();
10318 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10319 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10320#ifdef RT_ARCH_X86
10321 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10322#else
10323 IEM_MC_ARG(uint64_t, u64Src, 2);
10324#endif
10325 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10326
10327 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10328 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10329 IEM_MC_REF_EFLAGS(pEFlags);
10330#ifdef RT_ARCH_X86
10331 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10333 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10334 else
10335 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10336#else
10337 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10338 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10339 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10340 else
10341 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10342#endif
10343
10344 IEM_MC_ADVANCE_RIP_AND_FINISH();
10345 IEM_MC_END();
10346 break;
10347
10348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10349 }
10350 }
10351 else
10352 {
10353 switch (pVCpu->iem.s.enmEffOpSize)
10354 {
10355 case IEMMODE_16BIT:
10356 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10357 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10358 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10359 IEM_MC_ARG(uint16_t, u16Src, 2);
10360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10362 IEM_MC_LOCAL(uint16_t, u16Ax);
10363 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10364
10365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10366 IEMOP_HLP_DONE_DECODING();
10367 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10368 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10369 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10370 IEM_MC_FETCH_EFLAGS(EFlags);
10371 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10372 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10373 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10374 else
10375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10376
10377 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
10378 IEM_MC_COMMIT_EFLAGS(EFlags);
10379 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10380 IEM_MC_ADVANCE_RIP_AND_FINISH();
10381 IEM_MC_END();
10382 break;
10383
10384 case IEMMODE_32BIT:
10385 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10386 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10387 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10388 IEM_MC_ARG(uint32_t, u32Src, 2);
10389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10391 IEM_MC_LOCAL(uint32_t, u32Eax);
10392 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10393
10394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10395 IEMOP_HLP_DONE_DECODING();
10396 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10397 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10398 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10399 IEM_MC_FETCH_EFLAGS(EFlags);
10400 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10401 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10402 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10403 else
10404 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10405
10406 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
10407 IEM_MC_COMMIT_EFLAGS(EFlags);
10408
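/* EAX is only written back when the compare failed (ZF clear); on success
 * the accumulator, and thus the high half of RAX, is left untouched. */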
10409 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10410 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10411 } IEM_MC_ENDIF();
10412
10413 IEM_MC_ADVANCE_RIP_AND_FINISH();
10414 IEM_MC_END();
10415 break;
10416
10417 case IEMMODE_64BIT:
10418 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT);
10419 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10420 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10421#ifdef RT_ARCH_X86
10422 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10423#else
10424 IEM_MC_ARG(uint64_t, u64Src, 2);
10425#endif
10426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10428 IEM_MC_LOCAL(uint64_t, u64Rax);
10429 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10430
10431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10432 IEMOP_HLP_DONE_DECODING();
10433 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10434 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10435 IEM_MC_FETCH_EFLAGS(EFlags);
10436 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10437#ifdef RT_ARCH_X86
10438 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10439 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10440 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10441 else
10442 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10443#else
10444 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10445 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10447 else
10448 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10449#endif
10450
10451 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
10452 IEM_MC_COMMIT_EFLAGS(EFlags);
10453 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10454 IEM_MC_ADVANCE_RIP_AND_FINISH();
10455 IEM_MC_END();
10456 break;
10457
10458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10459 }
10460 }
10461}
10462
10463
10464/** Opcode 0x0f 0xb2. */
10465FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10466{
10467 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10468 IEMOP_HLP_MIN_386();
10469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
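/* Mp is a far pointer in memory, so the register form is invalid and raises #UD. */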
10470 if (IEM_IS_MODRM_REG_MODE(bRm))
10471 IEMOP_RAISE_INVALID_OPCODE_RET();
10472 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10473}
10474
10475
10476/** Opcode 0x0f 0xb3. */
10477FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10478{
10479 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10480 IEMOP_HLP_MIN_386();
10481 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10482 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10483}
10484
10485
10486/** Opcode 0x0f 0xb4. */
10487FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10488{
10489 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10490 IEMOP_HLP_MIN_386();
10491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10492 if (IEM_IS_MODRM_REG_MODE(bRm))
10493 IEMOP_RAISE_INVALID_OPCODE_RET();
10494 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10495}
10496
10497
10498/** Opcode 0x0f 0xb5. */
10499FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10500{
10501 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10502 IEMOP_HLP_MIN_386();
10503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10504 if (IEM_IS_MODRM_REG_MODE(bRm))
10505 IEMOP_RAISE_INVALID_OPCODE_RET();
10506 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10507}
10508
10509
10510/** Opcode 0x0f 0xb6. */
10511FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10512{
10513 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10514 IEMOP_HLP_MIN_386();
10515
10516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10517
10518 /*
10519 * If rm is denoting a register, no more instruction bytes.
10520 */
10521 if (IEM_IS_MODRM_REG_MODE(bRm))
10522 {
10523 switch (pVCpu->iem.s.enmEffOpSize)
10524 {
10525 case IEMMODE_16BIT:
10526 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10528 IEM_MC_LOCAL(uint16_t, u16Value);
10529 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10530 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10531 IEM_MC_ADVANCE_RIP_AND_FINISH();
10532 IEM_MC_END();
10533 break;
10534
10535 case IEMMODE_32BIT:
10536 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10538 IEM_MC_LOCAL(uint32_t, u32Value);
10539 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10540 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10541 IEM_MC_ADVANCE_RIP_AND_FINISH();
10542 IEM_MC_END();
10543 break;
10544
10545 case IEMMODE_64BIT:
10546 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
10547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10548 IEM_MC_LOCAL(uint64_t, u64Value);
10549 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10550 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10551 IEM_MC_ADVANCE_RIP_AND_FINISH();
10552 IEM_MC_END();
10553 break;
10554
10555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10556 }
10557 }
10558 else
10559 {
10560 /*
10561 * We're loading a register from memory.
10562 */
10563 switch (pVCpu->iem.s.enmEffOpSize)
10564 {
10565 case IEMMODE_16BIT:
10566 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10567 IEM_MC_LOCAL(uint16_t, u16Value);
10568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10571 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10572 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10573 IEM_MC_ADVANCE_RIP_AND_FINISH();
10574 IEM_MC_END();
10575 break;
10576
10577 case IEMMODE_32BIT:
10578 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10579 IEM_MC_LOCAL(uint32_t, u32Value);
10580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10583 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10584 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10585 IEM_MC_ADVANCE_RIP_AND_FINISH();
10586 IEM_MC_END();
10587 break;
10588
10589 case IEMMODE_64BIT:
10590 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
10591 IEM_MC_LOCAL(uint64_t, u64Value);
10592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10595 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10596 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10597 IEM_MC_ADVANCE_RIP_AND_FINISH();
10598 IEM_MC_END();
10599 break;
10600
10601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10602 }
10603 }
10604}
10605
10606
10607/** Opcode 0x0f 0xb7. */
10608FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10609{
10610 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10611 IEMOP_HLP_MIN_386();
10612
10613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10614
10615 /** @todo Not entirely sure how the operand size prefix is handled here,
10616 * assuming that it will be ignored. Would be nice to have a few
10617 * tests for this. */
10618 /*
10619 * If rm is denoting a register, no more instruction bytes.
10620 */
10621 if (IEM_IS_MODRM_REG_MODE(bRm))
10622 {
10623 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10624 {
10625 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10627 IEM_MC_LOCAL(uint32_t, u32Value);
10628 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10629 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10630 IEM_MC_ADVANCE_RIP_AND_FINISH();
10631 IEM_MC_END();
10632 }
10633 else
10634 {
10635 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
10636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10637 IEM_MC_LOCAL(uint64_t, u64Value);
10638 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10639 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10640 IEM_MC_ADVANCE_RIP_AND_FINISH();
10641 IEM_MC_END();
10642 }
10643 }
10644 else
10645 {
10646 /*
10647 * We're loading a register from memory.
10648 */
10649 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10650 {
10651 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10652 IEM_MC_LOCAL(uint32_t, u32Value);
10653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10656 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10657 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10658 IEM_MC_ADVANCE_RIP_AND_FINISH();
10659 IEM_MC_END();
10660 }
10661 else
10662 {
10663 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
10664 IEM_MC_LOCAL(uint64_t, u64Value);
10665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10668 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10669 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10670 IEM_MC_ADVANCE_RIP_AND_FINISH();
10671 IEM_MC_END();
10672 }
10673 }
10674}
10675
10676
10677/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10678FNIEMOP_UD_STUB(iemOp_jmpe);
10679
10680
10681/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10682FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10683{
10684 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10685 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10686 return iemOp_InvalidNeedRM(pVCpu);
10687#ifndef TST_IEM_CHECK_MC
10688# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10689 static const IEMOPBINSIZES s_Native =
10690 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10691# endif
10692 static const IEMOPBINSIZES s_Fallback =
10693 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10694#endif
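/* Pick the host-native popcnt helper when available, the C fallback otherwise. */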
10695 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10696 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10697}
10698
10699
10700/**
10701 * @opcode 0xb9
10702 * @opinvalid intel-modrm
10703 * @optest ->
10704 */
10705FNIEMOP_DEF(iemOp_Grp10)
10706{
10707 /*
10708 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10709 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10710 */
10711 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10712 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10713 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10714}
10715
10716
10717/**
10718 * Body for the Group 8 bit instructions (bt/bts/btr/btc Ev,Ib); the immediate bit offset is masked to the operand width.
10719 */
10720#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10721 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10722 \
10723 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10724 { \
10725 /* register destination. */ \
10726 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10727 \
10728 switch (pVCpu->iem.s.enmEffOpSize) \
10729 { \
10730 case IEMMODE_16BIT: \
10731 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10733 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10734 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10735 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10736 \
10737 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10738 IEM_MC_REF_EFLAGS(pEFlags); \
10739 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10740 \
10741 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10742 IEM_MC_END(); \
10743 break; \
10744 \
10745 case IEMMODE_32BIT: \
10746 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10748 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10749 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10750 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10751 \
10752 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10753 IEM_MC_REF_EFLAGS(pEFlags); \
10754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10755 \
10756 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10758 IEM_MC_END(); \
10759 break; \
10760 \
10761 case IEMMODE_64BIT: \
10762 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
10763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10764 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10765 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10766 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10767 \
10768 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10769 IEM_MC_REF_EFLAGS(pEFlags); \
10770 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10771 \
10772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10773 IEM_MC_END(); \
10774 break; \
10775 \
10776 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10777 } \
10778 } \
10779 else \
10780 { \
10781 /* memory destination. */ \
10782 /** @todo test negative bit offsets! */ \
10783 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10784 { \
10785 switch (pVCpu->iem.s.enmEffOpSize) \
10786 { \
10787 case IEMMODE_16BIT: \
10788 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10789 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10790 IEM_MC_ARG(uint16_t, u16Src, 1); \
10791 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10793 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10794 \
10795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10796 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10797 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10798 IEMOP_HLP_DONE_DECODING(); \
10799 IEM_MC_FETCH_EFLAGS(EFlags); \
10800 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10801 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10802 \
10803 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10804 IEM_MC_COMMIT_EFLAGS(EFlags); \
10805 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10806 IEM_MC_END(); \
10807 break; \
10808 \
10809 case IEMMODE_32BIT: \
10810 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10811 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10812 IEM_MC_ARG(uint32_t, u32Src, 1); \
10813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10815 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10816 \
10817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10818 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10819 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10820 IEMOP_HLP_DONE_DECODING(); \
10821 IEM_MC_FETCH_EFLAGS(EFlags); \
10822 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10823 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10824 \
10825 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10826 IEM_MC_COMMIT_EFLAGS(EFlags); \
10827 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10828 IEM_MC_END(); \
10829 break; \
10830 \
10831 case IEMMODE_64BIT: \
10832 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
10833 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10834 IEM_MC_ARG(uint64_t, u64Src, 1); \
10835 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10837 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10838 \
10839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10840 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10841 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10842 IEMOP_HLP_DONE_DECODING(); \
10843 IEM_MC_FETCH_EFLAGS(EFlags); \
10844 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10845 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10846 \
10847 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10848 IEM_MC_COMMIT_EFLAGS(EFlags); \
10849 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10850 IEM_MC_END(); \
10851 break; \
10852 \
10853 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10854 } \
10855 } \
10856 else \
10857 { \
10858 (void)0
10859/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10860#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10861 switch (pVCpu->iem.s.enmEffOpSize) \
10862 { \
10863 case IEMMODE_16BIT: \
10864 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10865 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10866 IEM_MC_ARG(uint16_t, u16Src, 1); \
10867 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10869 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10870 \
10871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10872 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10873 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10874 IEMOP_HLP_DONE_DECODING(); \
10875 IEM_MC_FETCH_EFLAGS(EFlags); \
10876 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10877 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10878 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10879 \
10880 IEM_MC_COMMIT_EFLAGS(EFlags); \
10881 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10882 IEM_MC_END(); \
10883 break; \
10884 \
10885 case IEMMODE_32BIT: \
10886 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10887 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10888 IEM_MC_ARG(uint32_t, u32Src, 1); \
10889 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10891 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10892 \
10893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10894 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10895 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10896 IEMOP_HLP_DONE_DECODING(); \
10897 IEM_MC_FETCH_EFLAGS(EFlags); \
10898 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10899 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10900 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10901 \
10902 IEM_MC_COMMIT_EFLAGS(EFlags); \
10903 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10904 IEM_MC_END(); \
10905 break; \
10906 \
10907 case IEMMODE_64BIT: \
10908 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
10909 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10910 IEM_MC_ARG(uint64_t, u64Src, 1); \
10911 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10913 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10914 \
10915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10916 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10917 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10918 IEMOP_HLP_DONE_DECODING(); \
10919 IEM_MC_FETCH_EFLAGS(EFlags); \
10920 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10921 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10922 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10923 \
10924 IEM_MC_COMMIT_EFLAGS(EFlags); \
10925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10926 IEM_MC_END(); \
10927 break; \
10928 \
10929 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10930 } \
10931 } \
10932 } \
10933 (void)0
10934
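/* The RW body above ends inside the LOCK-prefix else-branch and the LOCKED
 * body completes it, so the two macros must be used back to back, e.g.:
 *     IEMOP_BODY_BIT_Ev_Ib_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
 *     IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
 */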
10935/* Read-only version (bt) */
10936#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10937 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10938 \
10939 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10940 { \
10941 /* register destination. */ \
10942 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10943 \
10944 switch (pVCpu->iem.s.enmEffOpSize) \
10945 { \
10946 case IEMMODE_16BIT: \
10947 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10949 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10950 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10951 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10952 \
10953 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10954 IEM_MC_REF_EFLAGS(pEFlags); \
10955 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10956 \
10957 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10958 IEM_MC_END(); \
10959 break; \
10960 \
10961 case IEMMODE_32BIT: \
10962 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10964 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10965 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10966 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10967 \
10968 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10969 IEM_MC_REF_EFLAGS(pEFlags); \
10970 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10971 \
10972 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10973 IEM_MC_END(); \
10974 break; \
10975 \
10976 case IEMMODE_64BIT: \
10977 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
10978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10979 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10980 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10981 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10982 \
10983 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10984 IEM_MC_REF_EFLAGS(pEFlags); \
10985 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10986 \
10987 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10988 IEM_MC_END(); \
10989 break; \
10990 \
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10992 } \
10993 } \
10994 else \
10995 { \
10996 /* memory destination. */ \
10997 /** @todo test negative bit offsets! */ \
10998 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10999 { \
11000 switch (pVCpu->iem.s.enmEffOpSize) \
11001 { \
11002 case IEMMODE_16BIT: \
11003 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
11004 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11005 IEM_MC_ARG(uint16_t, u16Src, 1); \
11006 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11008 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11009 \
11010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11011 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11012 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
11013 IEMOP_HLP_DONE_DECODING(); \
11014 IEM_MC_FETCH_EFLAGS(EFlags); \
11015 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11016 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11017 \
11018 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
11019 IEM_MC_COMMIT_EFLAGS(EFlags); \
11020 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11021 IEM_MC_END(); \
11022 break; \
11023 \
11024 case IEMMODE_32BIT: \
11025 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
11026 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11027 IEM_MC_ARG(uint32_t, u32Src, 1); \
11028 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11030 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11031 \
11032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11033 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11034 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
11035 IEMOP_HLP_DONE_DECODING(); \
11036 IEM_MC_FETCH_EFLAGS(EFlags); \
11037 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11038 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11039 \
11040 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
11041 IEM_MC_COMMIT_EFLAGS(EFlags); \
11042 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11043 IEM_MC_END(); \
11044 break; \
11045 \
11046 case IEMMODE_64BIT: \
11047 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
11048 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11049 IEM_MC_ARG(uint64_t, u64Src, 1); \
11050 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11052 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11053 \
11054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11055 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11056 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
11057 IEMOP_HLP_DONE_DECODING(); \
11058 IEM_MC_FETCH_EFLAGS(EFlags); \
11059 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11060 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11061 \
11062 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
11063 IEM_MC_COMMIT_EFLAGS(EFlags); \
11064 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11065 IEM_MC_END(); \
11066 break; \
11067 \
11068 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11069 } \
11070 } \
11071 else \
11072 { \
11073 IEMOP_HLP_DONE_DECODING(); \
11074 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11075 } \
11076 } \
11077 (void)0
11078
11079
11080/** Opcode 0x0f 0xba /4. */
11081FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11082{
11083 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11084 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11085}
11086
11087
11088/** Opcode 0x0f 0xba /5. */
11089FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11090{
11091 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11092 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11093 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11094}
11095
11096
11097/** Opcode 0x0f 0xba /6. */
11098FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11099{
11100 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11101 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11102 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11103}
11104
11105
11106/** Opcode 0x0f 0xba /7. */
11107FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11108{
11109 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11110 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11111 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11112}
11113
11114
11115/** Opcode 0x0f 0xba. */
11116FNIEMOP_DEF(iemOp_Grp8)
11117{
11118 IEMOP_HLP_MIN_386();
11119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11120 switch (IEM_GET_MODRM_REG_8(bRm))
11121 {
11122 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11123 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11124 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11125 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11126
11127 case 0: case 1: case 2: case 3:
11128 /* Both AMD and Intel want full modr/m decoding and imm8. */
11129 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11130
11131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11132 }
11133}
11134
11135
11136/** Opcode 0x0f 0xbb. */
11137FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11138{
11139 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11140 IEMOP_HLP_MIN_386();
11141 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11142 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11143}
11144
11145
11146/**
11147 * Common worker for BSF and BSR instructions.
11148 *
11149 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11150 * the destination register: with a zero source (ZF set) it is left untouched,
11151 * so for 32-bit operations the high bits must be left alone rather than zeroed.
11152 *
11153 * @param pImpl Pointer to the instruction implementation (assembly).
11154 */
11155FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11156{
11157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11158
11159 /*
11160 * If rm is denoting a register, no more instruction bytes.
11161 */
11162 if (IEM_IS_MODRM_REG_MODE(bRm))
11163 {
11164 switch (pVCpu->iem.s.enmEffOpSize)
11165 {
11166 case IEMMODE_16BIT:
11167 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
11168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11169 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11170 IEM_MC_ARG(uint16_t, u16Src, 1);
11171 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11172
11173 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11174 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11175 IEM_MC_REF_EFLAGS(pEFlags);
11176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11177
11178 IEM_MC_ADVANCE_RIP_AND_FINISH();
11179 IEM_MC_END();
11180 break;
11181
11182 case IEMMODE_32BIT:
11183 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
11184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11185 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11186 IEM_MC_ARG(uint32_t, u32Src, 1);
11187 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11188
11189 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11190 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11191 IEM_MC_REF_EFLAGS(pEFlags);
11192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11193 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11194 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11195 } IEM_MC_ENDIF();
11196 IEM_MC_ADVANCE_RIP_AND_FINISH();
11197 IEM_MC_END();
11198 break;
11199
11200 case IEMMODE_64BIT:
11201 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11203 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11204 IEM_MC_ARG(uint64_t, u64Src, 1);
11205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11206
11207 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11208 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11209 IEM_MC_REF_EFLAGS(pEFlags);
11210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11211
11212 IEM_MC_ADVANCE_RIP_AND_FINISH();
11213 IEM_MC_END();
11214 break;
11215
11216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11217 }
11218 }
11219 else
11220 {
11221 /*
11222 * We're accessing memory.
11223 */
11224 switch (pVCpu->iem.s.enmEffOpSize)
11225 {
11226 case IEMMODE_16BIT:
11227 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
11228 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11229 IEM_MC_ARG(uint16_t, u16Src, 1);
11230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11232
11233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11235 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11236 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11237 IEM_MC_REF_EFLAGS(pEFlags);
11238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11239
11240 IEM_MC_ADVANCE_RIP_AND_FINISH();
11241 IEM_MC_END();
11242 break;
11243
11244 case IEMMODE_32BIT:
11245 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
11246 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11247 IEM_MC_ARG(uint32_t, u32Src, 1);
11248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11250
11251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11253 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11254 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11255 IEM_MC_REF_EFLAGS(pEFlags);
11256 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11257
11258 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11259 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11260 } IEM_MC_ENDIF();
11261 IEM_MC_ADVANCE_RIP_AND_FINISH();
11262 IEM_MC_END();
11263 break;
11264
11265 case IEMMODE_64BIT:
11266 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
11267 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11268 IEM_MC_ARG(uint64_t, u64Src, 1);
11269 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11271
11272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11274 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11275 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11276 IEM_MC_REF_EFLAGS(pEFlags);
11277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11278
11279 IEM_MC_ADVANCE_RIP_AND_FINISH();
11280 IEM_MC_END();
11281 break;
11282
11283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11284 }
11285 }
11286}
11287
11288
11289/** Opcode 0x0f 0xbc. */
11290FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11291{
11292 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11293 IEMOP_HLP_MIN_386();
11294 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11295 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11296}
11297
11298
11299/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11300FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11301{
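/* Without BMI1 the F3 prefix is ignored and this encoding executes as plain BSF. */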
11302 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11303 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11304 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11305
11306#ifndef TST_IEM_CHECK_MC
11307 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11308 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11309 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11310 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11311 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11312 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11313 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11314 {
11315 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11316 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11317 };
11318#endif
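/* The row is selected by host BMI1 availability, the column presumably by the
 * guest EFLAGS behaviour flavour (see IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX). */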
11319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11320 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11321 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11322 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11323}
11324
11325
11326/** Opcode 0x0f 0xbd. */
11327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11328{
11329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11330 IEMOP_HLP_MIN_386();
11331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11332 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11333}
11334
11335
11336/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11337FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11338{
11339 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11340 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11341 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11342
11343#ifndef TST_IEM_CHECK_MC
11344 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11345 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11346 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11347 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11348 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11349 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11350 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11351 {
11352 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11353 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11354 };
11355#endif
11356 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11357 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11358 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11359 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11360}
11361
11362
11363
11364/** Opcode 0x0f 0xbe. */
11365FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11366{
11367 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11368 IEMOP_HLP_MIN_386();
11369
11370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11371
11372 /*
11373 * If rm is denoting a register, no more instruction bytes.
11374 */
11375 if (IEM_IS_MODRM_REG_MODE(bRm))
11376 {
11377 switch (pVCpu->iem.s.enmEffOpSize)
11378 {
11379 case IEMMODE_16BIT:
11380 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11382 IEM_MC_LOCAL(uint16_t, u16Value);
11383 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11384 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11385 IEM_MC_ADVANCE_RIP_AND_FINISH();
11386 IEM_MC_END();
11387 break;
11388
11389 case IEMMODE_32BIT:
11390 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11392 IEM_MC_LOCAL(uint32_t, u32Value);
11393 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11394 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11395 IEM_MC_ADVANCE_RIP_AND_FINISH();
11396 IEM_MC_END();
11397 break;
11398
11399 case IEMMODE_64BIT:
11400 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
11401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11402 IEM_MC_LOCAL(uint64_t, u64Value);
11403 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11404 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11405 IEM_MC_ADVANCE_RIP_AND_FINISH();
11406 IEM_MC_END();
11407 break;
11408
11409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11410 }
11411 }
11412 else
11413 {
11414 /*
11415 * We're loading a register from memory.
11416 */
11417 switch (pVCpu->iem.s.enmEffOpSize)
11418 {
11419 case IEMMODE_16BIT:
11420 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11421 IEM_MC_LOCAL(uint16_t, u16Value);
11422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11425 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11426 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11427 IEM_MC_ADVANCE_RIP_AND_FINISH();
11428 IEM_MC_END();
11429 break;
11430
11431 case IEMMODE_32BIT:
11432 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11433 IEM_MC_LOCAL(uint32_t, u32Value);
11434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11437 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11438 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11439 IEM_MC_ADVANCE_RIP_AND_FINISH();
11440 IEM_MC_END();
11441 break;
11442
11443 case IEMMODE_64BIT:
11444 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
11445 IEM_MC_LOCAL(uint64_t, u64Value);
11446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11449 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11450 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11451 IEM_MC_ADVANCE_RIP_AND_FINISH();
11452 IEM_MC_END();
11453 break;
11454
11455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11456 }
11457 }
11458}
11459
11460
11461/** Opcode 0x0f 0xbf. */
11462FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11463{
11464 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11465 IEMOP_HLP_MIN_386();
11466
11467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11468
11469 /** @todo Not entirely sure how the operand size prefix is handled here,
11470 * assuming that it will be ignored. Would be nice to have a few
11471 * tests for this. */
11472 /*
11473 * If rm is denoting a register, no more instruction bytes.
11474 */
11475 if (IEM_IS_MODRM_REG_MODE(bRm))
11476 {
11477 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11478 {
11479 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_LOCAL(uint32_t, u32Value);
11482 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11484 IEM_MC_ADVANCE_RIP_AND_FINISH();
11485 IEM_MC_END();
11486 }
11487 else
11488 {
11489 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
11490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11491 IEM_MC_LOCAL(uint64_t, u64Value);
11492 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11493 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11494 IEM_MC_ADVANCE_RIP_AND_FINISH();
11495 IEM_MC_END();
11496 }
11497 }
11498 else
11499 {
11500 /*
11501 * We're loading a register from memory.
11502 */
11503 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11504 {
11505 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11506 IEM_MC_LOCAL(uint32_t, u32Value);
11507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11510 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11511 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11512 IEM_MC_ADVANCE_RIP_AND_FINISH();
11513 IEM_MC_END();
11514 }
11515 else
11516 {
11517 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
11518 IEM_MC_LOCAL(uint64_t, u64Value);
11519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11522 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11523 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11524 IEM_MC_ADVANCE_RIP_AND_FINISH();
11525 IEM_MC_END();
11526 }
11527 }
11528}
11529
11530
11531/** Opcode 0x0f 0xc0. */
11532FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11533{
11534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11535 IEMOP_HLP_MIN_486();
11536 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11537
11538 /*
11539 * If rm is denoting a register, no more instruction bytes.
11540 */
11541 if (IEM_IS_MODRM_REG_MODE(bRm))
11542 {
11543 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11545 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11546 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11547 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11548
11549 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11550 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11551 IEM_MC_REF_EFLAGS(pEFlags);
11552 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11553
11554 IEM_MC_ADVANCE_RIP_AND_FINISH();
11555 IEM_MC_END();
11556 }
11557 else
11558 {
11559 /*
11560 * We're accessing memory.
11561 */
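/* XADD exchanges and adds: the register operand is copied to a local that
 * receives the old memory value and is stored back after the memory commit. */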
11562 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11563 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11564 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11565 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11566 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11568 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11569
11570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11571 IEMOP_HLP_DONE_DECODING();
11572 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11573 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11574 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11575 IEM_MC_FETCH_EFLAGS(EFlags);
11576 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11578 else
11579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11580
11581 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
11582 IEM_MC_COMMIT_EFLAGS(EFlags);
11583 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11584 IEM_MC_ADVANCE_RIP_AND_FINISH();
11585 IEM_MC_END();
11586 }
11587}
11588
11589
11590/** Opcode 0x0f 0xc1. */
11591FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11592{
11593 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11594 IEMOP_HLP_MIN_486();
11595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11596
11597 /*
11598 * If rm is denoting a register, no more instruction bytes.
11599 */
11600 if (IEM_IS_MODRM_REG_MODE(bRm))
11601 {
11602 switch (pVCpu->iem.s.enmEffOpSize)
11603 {
11604 case IEMMODE_16BIT:
11605 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11607 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11608 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11609 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11610
11611 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11612 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11613 IEM_MC_REF_EFLAGS(pEFlags);
11614 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11615
11616 IEM_MC_ADVANCE_RIP_AND_FINISH();
11617 IEM_MC_END();
11618 break;
11619
11620 case IEMMODE_32BIT:
11621 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11623 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11624 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11625 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11626
11627 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11628 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11629 IEM_MC_REF_EFLAGS(pEFlags);
11630 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11631
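/* Both operands are written (the destination gets the sum, the register the
 * old destination value), so both high dwords must be cleared in 64-bit mode. */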
11632 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11633 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11634 IEM_MC_ADVANCE_RIP_AND_FINISH();
11635 IEM_MC_END();
11636 break;
11637
11638 case IEMMODE_64BIT:
11639 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
11640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11641 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11642 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11643 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11644
11645 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11646 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11647 IEM_MC_REF_EFLAGS(pEFlags);
11648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11649
11650 IEM_MC_ADVANCE_RIP_AND_FINISH();
11651 IEM_MC_END();
11652 break;
11653
11654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11655 }
11656 }
11657 else
11658 {
11659 /*
11660 * We're accessing memory.
11661 */
11662 switch (pVCpu->iem.s.enmEffOpSize)
11663 {
11664 case IEMMODE_16BIT:
11665 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11666 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11667 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11668 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11669 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11671 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11672
11673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11674 IEMOP_HLP_DONE_DECODING();
11675 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11676 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11677 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11678 IEM_MC_FETCH_EFLAGS(EFlags);
11679 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11680 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11681 else
11682 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11683
11684 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
11685 IEM_MC_COMMIT_EFLAGS(EFlags);
11686 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11687 IEM_MC_ADVANCE_RIP_AND_FINISH();
11688 IEM_MC_END();
11689 break;
11690
11691 case IEMMODE_32BIT:
11692 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11693 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11694 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11695 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11696 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11698 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11699
11700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11701 IEMOP_HLP_DONE_DECODING();
11702 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11703 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11704 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11705 IEM_MC_FETCH_EFLAGS(EFlags);
11706 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11707 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11708 else
11709 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11710
11711 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
11712 IEM_MC_COMMIT_EFLAGS(EFlags);
11713 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11714 IEM_MC_ADVANCE_RIP_AND_FINISH();
11715 IEM_MC_END();
11716 break;
11717
11718 case IEMMODE_64BIT:
11719 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
11720 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11721 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11722 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11723 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11725 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11726
11727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11728 IEMOP_HLP_DONE_DECODING();
11729 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11730 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11731 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11732 IEM_MC_FETCH_EFLAGS(EFlags);
11733 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11734 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11735 else
11736 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11737
11738 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
11739 IEM_MC_COMMIT_EFLAGS(EFlags);
11740 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11741 IEM_MC_ADVANCE_RIP_AND_FINISH();
11742 IEM_MC_END();
11743 break;
11744
11745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11746 }
11747 }
11748}
11749
11750
11751/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11752FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11753{
11754 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11755
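/* The Ib immediate selects the compare predicate: 0=EQ, 1=LT, 2=LE, 3=UNORD,
 * 4=NEQ, 5=NLT, 6=NLE, 7=ORD. */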
11756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11757 if (IEM_IS_MODRM_REG_MODE(bRm))
11758 {
11759 /*
11760 * XMM, XMM.
11761 */
11762 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11763 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11765 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11766 IEM_MC_LOCAL(X86XMMREG, Dst);
11767 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11768 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11769 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11770 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11771 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11772 IEM_MC_PREPARE_SSE_USAGE();
11773 IEM_MC_REF_MXCSR(pfMxcsr);
11774 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11775 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11776 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11777 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11778 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11779 } IEM_MC_ELSE() {
11780 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11781 } IEM_MC_ENDIF();
11782
11783 IEM_MC_ADVANCE_RIP_AND_FINISH();
11784 IEM_MC_END();
11785 }
11786 else
11787 {
11788 /*
11789 * XMM, [mem128].
11790 */
11791 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11792 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11793 IEM_MC_LOCAL(X86XMMREG, Dst);
11794 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11795 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11796 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11798
11799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11800 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11801 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11803 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11804 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11805
11806 IEM_MC_PREPARE_SSE_USAGE();
11807 IEM_MC_REF_MXCSR(pfMxcsr);
11808 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11809 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11810 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11811 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11812 } IEM_MC_ELSE() {
11813 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11814 } IEM_MC_ENDIF();
11815
11816 IEM_MC_ADVANCE_RIP_AND_FINISH();
11817 IEM_MC_END();
11818 }
11819}
11820
11821
11822/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11823FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11824{
11825 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11826
11827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11828 if (IEM_IS_MODRM_REG_MODE(bRm))
11829 {
11830 /*
11831 * XMM, XMM.
11832 */
11833 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11834 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11836 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11837 IEM_MC_LOCAL(X86XMMREG, Dst);
11838 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11839 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11840 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11841 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11842 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11843 IEM_MC_PREPARE_SSE_USAGE();
11844 IEM_MC_REF_MXCSR(pfMxcsr);
11845 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11846 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11847 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11848 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11849 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11850 } IEM_MC_ELSE() {
11851 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11852 } IEM_MC_ENDIF();
11853
11854 IEM_MC_ADVANCE_RIP_AND_FINISH();
11855 IEM_MC_END();
11856 }
11857 else
11858 {
11859 /*
11860 * XMM, [mem128].
11861 */
11862 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11863 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11864 IEM_MC_LOCAL(X86XMMREG, Dst);
11865 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11866 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11867 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11869
11870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11871 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11872 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11874 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11875 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11876
11877 IEM_MC_PREPARE_SSE_USAGE();
11878 IEM_MC_REF_MXCSR(pfMxcsr);
11879 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11880 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11881 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11882 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11883 } IEM_MC_ELSE() {
11884 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11885 } IEM_MC_ENDIF();
11886
11887 IEM_MC_ADVANCE_RIP_AND_FINISH();
11888 IEM_MC_END();
11889 }
11890}
11891
11892
11893/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11894FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11895{
11896 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11897
11898 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11899 if (IEM_IS_MODRM_REG_MODE(bRm))
11900 {
11901 /*
11902 * XMM32, XMM32.
11903 */
11904 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11905 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11907 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11908 IEM_MC_LOCAL(X86XMMREG, Dst);
11909 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11910 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11911 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11912 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11913 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11914 IEM_MC_PREPARE_SSE_USAGE();
11915 IEM_MC_REF_MXCSR(pfMxcsr);
11916 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11917 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11918 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11919 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11920 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11921 } IEM_MC_ELSE() {
11922 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11923 } IEM_MC_ENDIF();
11924
11925 IEM_MC_ADVANCE_RIP_AND_FINISH();
11926 IEM_MC_END();
11927 }
11928 else
11929 {
11930 /*
11931 * XMM32, [mem32].
11932 */
11933 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11934 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11935 IEM_MC_LOCAL(X86XMMREG, Dst);
11936 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11937 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11938 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11940
11941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11942 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11943 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11945 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11946 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11947
11948 IEM_MC_PREPARE_SSE_USAGE();
11949 IEM_MC_REF_MXCSR(pfMxcsr);
11950 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11951 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11952 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11953 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11954 } IEM_MC_ELSE() {
11955 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11956 } IEM_MC_ENDIF();
11957
11958 IEM_MC_ADVANCE_RIP_AND_FINISH();
11959 IEM_MC_END();
11960 }
11961}
11962
11963
11964/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11965FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11966{
11967 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11968
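 /* As cmpss, but for the low double: only the low qword of the destination is
    written, the high qword is preserved. */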
11969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11970 if (IEM_IS_MODRM_REG_MODE(bRm))
11971 {
11972 /*
11973 * XMM64, XMM64.
11974 */
11975 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11976 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11978 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11979 IEM_MC_LOCAL(X86XMMREG, Dst);
11980 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11981 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11982 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11983 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11984 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11985 IEM_MC_PREPARE_SSE_USAGE();
11986 IEM_MC_REF_MXCSR(pfMxcsr);
11987 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11988 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11989 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11990 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11991 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11992 } IEM_MC_ELSE() {
11993 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11994 } IEM_MC_ENDIF();
11995
11996 IEM_MC_ADVANCE_RIP_AND_FINISH();
11997 IEM_MC_END();
11998 }
11999 else
12000 {
12001 /*
12002 * XMM64, [mem64].
12003 */
12004 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
12005 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12006 IEM_MC_LOCAL(X86XMMREG, Dst);
12007 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12008 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12009 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12011
12012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12013 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12014 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12016 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12017 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12018
12019 IEM_MC_PREPARE_SSE_USAGE();
12020 IEM_MC_REF_MXCSR(pfMxcsr);
12021 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12023 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12024 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12025 } IEM_MC_ELSE() {
12026 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12027 } IEM_MC_ENDIF();
12028
12029 IEM_MC_ADVANCE_RIP_AND_FINISH();
12030 IEM_MC_END();
12031 }
12032}
12033
12034
12035/** Opcode 0x0f 0xc3. */
12036FNIEMOP_DEF(iemOp_movnti_My_Gy)
12037{
12038 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12039
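 /* The non-temporal hint only affects caching behaviour, so architecturally
    this is a plain store and it is emulated as one below. */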
12040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12041
12042 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12043 if (IEM_IS_MODRM_MEM_MODE(bRm))
12044 {
12045 switch (pVCpu->iem.s.enmEffOpSize)
12046 {
12047 case IEMMODE_32BIT:
12048 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
12049 IEM_MC_LOCAL(uint32_t, u32Value);
12050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12051
12052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12054
12055 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12056 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12057 IEM_MC_ADVANCE_RIP_AND_FINISH();
12058 IEM_MC_END();
12059 break;
12060
12061 case IEMMODE_64BIT:
12062 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
12063 IEM_MC_LOCAL(uint64_t, u64Value);
12064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12065
12066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12068
12069 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12071 IEM_MC_ADVANCE_RIP_AND_FINISH();
12072 IEM_MC_END();
12073 break;
12074
12075 case IEMMODE_16BIT:
12076 /** @todo check this form. */
12077 IEMOP_RAISE_INVALID_OPCODE_RET();
12078
12079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12080 }
12081 }
12082 else
12083 IEMOP_RAISE_INVALID_OPCODE_RET();
12084}
12085
12086
12087/* Opcode 0x66 0x0f 0xc3 - invalid */
12088/* Opcode 0xf3 0x0f 0xc3 - invalid */
12089/* Opcode 0xf2 0x0f 0xc3 - invalid */
12090
12091
12092/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12093FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12094{
12095 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
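 /* Inserts the source word into the destination at the word index given by
    imm8 & 3 (64-bit MMX destination). */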
12096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12097 if (IEM_IS_MODRM_REG_MODE(bRm))
12098 {
12099 /*
12100 * Register, register.
12101 */
12102 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12103 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12105 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12106 IEM_MC_ARG(uint16_t, u16Src, 1);
12107 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12108 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12109 IEM_MC_PREPARE_FPU_USAGE();
12110 IEM_MC_FPU_TO_MMX_MODE();
12111 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12112 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12113 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12114 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12115 IEM_MC_ADVANCE_RIP_AND_FINISH();
12116 IEM_MC_END();
12117 }
12118 else
12119 {
12120 /*
12121 * Register, memory.
12122 */
12123 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12124 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12125 IEM_MC_ARG(uint16_t, u16Src, 1);
12126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12127
12128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12129 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12130 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12132 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12133 IEM_MC_PREPARE_FPU_USAGE();
12134 IEM_MC_FPU_TO_MMX_MODE();
12135
12136 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12137 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12138 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12139 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12140 IEM_MC_ADVANCE_RIP_AND_FINISH();
12141 IEM_MC_END();
12142 }
12143}
12144
12145
12146/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12147FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12148{
12149 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
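 /* As the MMX form above, but imm8 & 7 selects one of the eight words in the
    128-bit destination. */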
12150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12151 if (IEM_IS_MODRM_REG_MODE(bRm))
12152 {
12153 /*
12154 * Register, register.
12155 */
12156 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12157 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12159 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12160 IEM_MC_ARG(uint16_t, u16Src, 1);
12161 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12162 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12163 IEM_MC_PREPARE_SSE_USAGE();
12164 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12165 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12167 IEM_MC_ADVANCE_RIP_AND_FINISH();
12168 IEM_MC_END();
12169 }
12170 else
12171 {
12172 /*
12173 * Register, memory.
12174 */
12175 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12176 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12177 IEM_MC_ARG(uint16_t, u16Src, 1);
12178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12179
12180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12181 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12182 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12185 IEM_MC_PREPARE_SSE_USAGE();
12186
12187 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12188 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12189 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12190 IEM_MC_ADVANCE_RIP_AND_FINISH();
12191 IEM_MC_END();
12192 }
12193}
12194
12195
12196/* Opcode 0xf3 0x0f 0xc4 - invalid */
12197/* Opcode 0xf2 0x0f 0xc4 - invalid */
12198
12199
12200/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12201FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12202{
12203 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
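 /* Extracts the word selected by imm8 & 3 from the MMX register and
    zero-extends it into the 32-bit general register. */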
12204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12205 if (IEM_IS_MODRM_REG_MODE(bRm))
12206 {
12207 /*
12208 * Greg32, MMX, imm8.
12209 */
12210 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12211 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12213 IEM_MC_LOCAL(uint16_t, u16Dst);
12214 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12215 IEM_MC_ARG(uint64_t, u64Src, 1);
12216 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12217 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12218 IEM_MC_PREPARE_FPU_USAGE();
12219 IEM_MC_FPU_TO_MMX_MODE();
12220 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12222 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12223 IEM_MC_ADVANCE_RIP_AND_FINISH();
12224 IEM_MC_END();
12225 }
12226 /* No memory operand. */
12227 else
12228 IEMOP_RAISE_INVALID_OPCODE_RET();
12229}
12230
12231
12232/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12233FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12234{
12235 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12237 if (IEM_IS_MODRM_REG_MODE(bRm))
12238 {
12239 /*
12240 * Greg32, XMM, imm8.
12241 */
12242 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12243 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12245 IEM_MC_LOCAL(uint16_t, u16Dst);
12246 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12247 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12248 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12249 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12250 IEM_MC_PREPARE_SSE_USAGE();
12251 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12252 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12253 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12254 IEM_MC_ADVANCE_RIP_AND_FINISH();
12255 IEM_MC_END();
12256 }
12257 /* No memory operand. */
12258 else
12259 IEMOP_RAISE_INVALID_OPCODE_RET();
12260}
12261
12262
12263/* Opcode 0xf3 0x0f 0xc5 - invalid */
12264/* Opcode 0xf2 0x0f 0xc5 - invalid */
12265
12266
12267/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12268FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12269{
12270 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
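 /* imm8 selects the result dwords two bits at a time: result dwords 0 and 1
    come from the destination (imm[1:0], imm[3:2]), dwords 2 and 3 from the
    source (imm[5:4], imm[7:6]). */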
12271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12272 if (IEM_IS_MODRM_REG_MODE(bRm))
12273 {
12274 /*
12275 * XMM, XMM, imm8.
12276 */
12277 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12278 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12280 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12281 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12282 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12283 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12284 IEM_MC_PREPARE_SSE_USAGE();
12285 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12286 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12287 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12288 IEM_MC_ADVANCE_RIP_AND_FINISH();
12289 IEM_MC_END();
12290 }
12291 else
12292 {
12293 /*
12294 * XMM, [mem128], imm8.
12295 */
12296 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12297 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12298 IEM_MC_LOCAL(RTUINT128U, uSrc);
12299 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12301
12302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12303 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12304 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12306 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12307 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12308
12309 IEM_MC_PREPARE_SSE_USAGE();
12310 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12311 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12312
12313 IEM_MC_ADVANCE_RIP_AND_FINISH();
12314 IEM_MC_END();
12315 }
12316}
12317
12318
12319/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12320FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12321{
12322 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
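 /* Only imm8 bits 0 and 1 are used: bit 0 selects which destination qword goes
    into the low half of the result, bit 1 which source qword goes into the
    high half. */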
12323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12324 if (IEM_IS_MODRM_REG_MODE(bRm))
12325 {
12326 /*
12327 * XMM, XMM, imm8.
12328 */
12329 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12330 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12332 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12333 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12334 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12336 IEM_MC_PREPARE_SSE_USAGE();
12337 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12338 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12339 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12340 IEM_MC_ADVANCE_RIP_AND_FINISH();
12341 IEM_MC_END();
12342 }
12343 else
12344 {
12345 /*
12346 * XMM, [mem128], imm8.
12347 */
12348 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12349 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12350 IEM_MC_LOCAL(RTUINT128U, uSrc);
12351 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12353
12354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12355 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12356 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12358 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12359 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12360
12361 IEM_MC_PREPARE_SSE_USAGE();
12362 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12363 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12364
12365 IEM_MC_ADVANCE_RIP_AND_FINISH();
12366 IEM_MC_END();
12367 }
12368}
12369
12370
12371/* Opcode 0xf3 0x0f 0xc6 - invalid */
12372/* Opcode 0xf2 0x0f 0xc6 - invalid */
12373
12374
12375/** Opcode 0x0f 0xc7 !11/1. */
12376FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12377{
12378 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12379
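 /* Compares EDX:EAX with the 64-bit memory operand: if equal, ZF is set and
    ECX:EBX is written to memory; otherwise ZF is cleared and the memory value
    is loaded into EDX:EAX. */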
12380 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER);
12381 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12382 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12383 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12384 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12385 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12386 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12388 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12389
12390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12391 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
12392 IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12393
12394 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12395 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12396 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12397
12398 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12399 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12400 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12401
12402 IEM_MC_FETCH_EFLAGS(EFlags);
12403 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12404 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12405 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12406 else
12407 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12408
12409 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
12410 IEM_MC_COMMIT_EFLAGS(EFlags);
12411 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12412 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12413 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12414 } IEM_MC_ENDIF();
12415 IEM_MC_ADVANCE_RIP_AND_FINISH();
12416
12417 IEM_MC_END();
12418}
12419
12420
12421/** Opcode REX.W 0x0f 0xc7 !11/1. */
12422FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12423{
12424 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
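 /* The 16-byte variant is selected by REX.W and requires CPUID.CX16 as well as
    a 16-byte aligned operand (#GP(0) otherwise, see the alignment check
    below). */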
12425 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12426 {
12427 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
12428 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12429 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12430 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12431 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12432 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12433 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12435
12436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12437 IEMOP_HLP_DONE_DECODING();
12438 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12439 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12440
12441 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12442 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12443 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12444
12445 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12446 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12447 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12448
12449 IEM_MC_FETCH_EFLAGS(EFlags);
12450
12451#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
12452 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12453 {
12454 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12455 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12456 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12457 else
12458 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12459 }
12460 else
12461 { /* (see comments in #else case below) */
12462 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12463 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12464 else
12465 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12466 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12467 }
12468
12469#elif defined(RT_ARCH_ARM64)
12470 /** @todo may require fallback for unaligned accesses... */
12471 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12472 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12473 else
12474 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12475
12476#else
12477 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12478 accesses that are not all atomic, which works fine in a UNI CPU guest
12479 configuration (ignoring DMA). If guest SMP is active we have no choice
12480 but to use a rendezvous callback here. Sigh. */
12481 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12483 else
12484 {
12485 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12486 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12487 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12488 }
12489#endif
12490
12491 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12492 IEM_MC_COMMIT_EFLAGS(EFlags);
12493 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12494 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12495 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12496 } IEM_MC_ENDIF();
12497 IEM_MC_ADVANCE_RIP_AND_FINISH();
12498
12499 IEM_MC_END();
12500 }
12501 Log(("cmpxchg16b -> #UD\n"));
12502 IEMOP_RAISE_INVALID_OPCODE_RET();
12503}
12504
12505FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12506{
12507 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12508 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12509 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12510}
12511
12512
12513/** Opcode 0x0f 0xc7 11/6. */
12514FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12515{
12516 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12517 IEMOP_RAISE_INVALID_OPCODE_RET();
12518
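 /* On success CF is set and the destination receives the random value; on
    underflow CF is cleared and the destination is zeroed. OF, SF, ZF, AF and
    PF are cleared in both cases. */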
12519 if (IEM_IS_MODRM_REG_MODE(bRm))
12520 {
12521 /* register destination. */
12522 switch (pVCpu->iem.s.enmEffOpSize)
12523 {
12524 case IEMMODE_16BIT:
12525 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12527 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12528 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12529
12530 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12531 IEM_MC_REF_EFLAGS(pEFlags);
12532 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12533 pu16Dst, pEFlags);
12534
12535 IEM_MC_ADVANCE_RIP_AND_FINISH();
12536 IEM_MC_END();
12537 break;
12538
12539 case IEMMODE_32BIT:
12540 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
12541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12542 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12543 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12544
12545 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12546 IEM_MC_REF_EFLAGS(pEFlags);
12547 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12548 pu32Dst, pEFlags);
12549
12550 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12551 IEM_MC_ADVANCE_RIP_AND_FINISH();
12552 IEM_MC_END();
12553 break;
12554
12555 case IEMMODE_64BIT:
12556 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
12557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12558 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12559 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12560
12561 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12562 IEM_MC_REF_EFLAGS(pEFlags);
12563 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12564 pu64Dst, pEFlags);
12565
12566 IEM_MC_ADVANCE_RIP_AND_FINISH();
12567 IEM_MC_END();
12568 break;
12569
12570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12571 }
12572 }
12573 /* Register only. */
12574 else
12575 IEMOP_RAISE_INVALID_OPCODE_RET();
12576}
12577
12578/** Opcode 0x0f 0xc7 !11/6. */
12579#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12580FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12581{
12582 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12583 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12584 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12585 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12586 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12587 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12589 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12590 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12591 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12592 IEM_MC_END();
12593}
12594#else
12595FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12596#endif
12597
12598/** Opcode 0x66 0x0f 0xc7 !11/6. */
12599#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12600FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12601{
12602 IEMOP_MNEMONIC(vmclear, "vmclear");
12603 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12604 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12605 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12606 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12607 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12609 IEMOP_HLP_DONE_DECODING();
12610 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12611 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12612 IEM_MC_END();
12613}
12614#else
12615FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12616#endif
12617
12618/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12619#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12620FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12621{
12622 IEMOP_MNEMONIC(vmxon, "vmxon");
12623 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12624 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12625 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12626 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12628 IEMOP_HLP_DONE_DECODING();
12629 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12630 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12631 IEM_MC_END();
12632}
12633#else
12634FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12635#endif
12636
12637/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12638#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12639FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12640{
12641 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12642 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12643 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12644 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12645 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12646 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12648 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12649 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12650 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12651 IEM_MC_END();
12652}
12653#else
12654FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12655#endif
12656
12657/** Opcode 0x0f 0xc7 11/7. */
12658FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12659{
12660 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12661 IEMOP_RAISE_INVALID_OPCODE_RET();
12662
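 /* Flag behaviour matches rdrand, but the value comes from the entropy
    source/conditioner rather than the DRBG, so underflow is more likely. */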
12663 if (IEM_IS_MODRM_REG_MODE(bRm))
12664 {
12665 /* register destination. */
12666 switch (pVCpu->iem.s.enmEffOpSize)
12667 {
12668 case IEMMODE_16BIT:
12669 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12671 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12672 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12673
12674 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12675 IEM_MC_REF_EFLAGS(pEFlags);
12676 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12677 pu16Dst, pEFlags);
12678
12679 IEM_MC_ADVANCE_RIP_AND_FINISH();
12680 IEM_MC_END();
12681 break;
12682
12683 case IEMMODE_32BIT:
12684 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
12685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12686 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12687 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12688
12689 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12690 IEM_MC_REF_EFLAGS(pEFlags);
12691 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12692 pu32Dst, pEFlags);
12693
12694 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12695 IEM_MC_ADVANCE_RIP_AND_FINISH();
12696 IEM_MC_END();
12697 break;
12698
12699 case IEMMODE_64BIT:
12700 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
12701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12702 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12703 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12704
12705 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12706 IEM_MC_REF_EFLAGS(pEFlags);
12707 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12708 pu64Dst, pEFlags);
12709
12710 IEM_MC_ADVANCE_RIP_AND_FINISH();
12711 IEM_MC_END();
12712 break;
12713
12714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12715 }
12716 }
12717 /* Register only. */
12718 else
12719 IEMOP_RAISE_INVALID_OPCODE_RET();
12720}
12721
12722/**
12723 * Group 9 jump table for register variant.
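 * The table is indexed by the ModR/M reg field times four plus the operand
 * prefix index (none, 066h, 0f3h, 0f2h), see iemOp_Grp9.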
12724 */
12725IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12726{ /* pfx: none, 066h, 0f3h, 0f2h */
12727 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12728 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12729 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12730 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12731 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12732 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12733 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12734 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12735};
12736AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12737
12738
12739/**
12740 * Group 9 jump table for memory variant.
12741 */
12742IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12743{ /* pfx: none, 066h, 0f3h, 0f2h */
12744 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12745 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12746 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12747 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12748 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12749 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12750 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12751 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12752};
12753AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12754
12755
12756/** Opcode 0x0f 0xc7. */
12757FNIEMOP_DEF(iemOp_Grp9)
12758{
12759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12760 if (IEM_IS_MODRM_REG_MODE(bRm))
12761 /* register, register */
12762 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12763 + pVCpu->iem.s.idxPrefix], bRm);
12764 /* memory, register */
12765 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12766 + pVCpu->iem.s.idxPrefix], bRm);
12767}
12768
12769
12770/**
12771 * Common 'bswap register' helper.
12772 */
12773FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12774{
12775 switch (pVCpu->iem.s.enmEffOpSize)
12776 {
12777 case IEMMODE_16BIT:
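 /* Note! BSWAP with a 16-bit operand is documented as undefined; the 16-bit
    worker mimics real hardware behaviour here (the low word is typically
    left as zero). */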
12778 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486);
12779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12780 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12781 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12782 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12783 IEM_MC_ADVANCE_RIP_AND_FINISH();
12784 IEM_MC_END();
12785 break;
12786
12787 case IEMMODE_32BIT:
12788 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486);
12789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12791 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12792 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12793 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12794 IEM_MC_ADVANCE_RIP_AND_FINISH();
12795 IEM_MC_END();
12796 break;
12797
12798 case IEMMODE_64BIT:
12799 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
12800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12801 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12802 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12803 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12804 IEM_MC_ADVANCE_RIP_AND_FINISH();
12805 IEM_MC_END();
12806 break;
12807
12808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12809 }
12810}
12811
12812
12813/** Opcode 0x0f 0xc8. */
12814FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12815{
12816 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12817 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12818 prefix, but REX.B appears to be the correct one. For a parallel
12819 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12820 IEMOP_HLP_MIN_486();
12821 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12822}
12823
12824
12825/** Opcode 0x0f 0xc9. */
12826FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12827{
12828 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12829 IEMOP_HLP_MIN_486();
12830 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12831}
12832
12833
12834/** Opcode 0x0f 0xca. */
12835FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12836{
12837 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12838 IEMOP_HLP_MIN_486();
12839 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12840}
12841
12842
12843/** Opcode 0x0f 0xcb. */
12844FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12845{
12846 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12847 IEMOP_HLP_MIN_486();
12848 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12849}
12850
12851
12852/** Opcode 0x0f 0xcc. */
12853FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12854{
12855 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12856 IEMOP_HLP_MIN_486();
12857 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12858}
12859
12860
12861/** Opcode 0x0f 0xcd. */
12862FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12863{
12864 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12865 IEMOP_HLP_MIN_486();
12866 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12867}
12868
12869
12870/** Opcode 0x0f 0xce. */
12871FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12872{
12873 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12874 IEMOP_HLP_MIN_486();
12875 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12876}
12877
12878
12879/** Opcode 0x0f 0xcf. */
12880FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12881{
12882 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12883 IEMOP_HLP_MIN_486();
12884 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12885}
12886
12887
12888/* Opcode 0x0f 0xd0 - invalid */
12889
12890
12891/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12892FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12893{
12894 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
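 /* Subtracts in the even (low) lane and adds in the odd (high) lane (SSE3). */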
12895 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12896}
12897
12898
12899/* Opcode 0xf3 0x0f 0xd0 - invalid */
12900
12901
12902/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12903FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12904{
12905 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12906 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12907}
12908
12909
12910
12911/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12912FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12913{
12914 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12915 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12916}
12917
12918/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12919FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12920{
12921 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12922 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12923}
12924
12925/* Opcode 0xf3 0x0f 0xd1 - invalid */
12926/* Opcode 0xf2 0x0f 0xd1 - invalid */
12927
12928/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12929FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12930{
12931 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12932 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12933}
12934
12935
12936/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12937FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12938{
12939 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12940 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12941}
12942
12943
12944/* Opcode 0xf3 0x0f 0xd2 - invalid */
12945/* Opcode 0xf2 0x0f 0xd2 - invalid */
12946
12947/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12948FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12949{
12950 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12951 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12952}
12953
12954
12955/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12956FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12957{
12958 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12959 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12960}
12961
12962
12963/* Opcode 0xf3 0x0f 0xd3 - invalid */
12964/* Opcode 0xf2 0x0f 0xd3 - invalid */
12965
12966
12967/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12968FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12969{
12970 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12971 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12972}
12973
12974
12975/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12976FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12977{
12978 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12979 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12980}
12981
12982
12983/* Opcode 0xf3 0x0f 0xd4 - invalid */
12984/* Opcode 0xf2 0x0f 0xd4 - invalid */
12985
12986/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12987FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12988{
12989 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12990 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12991}
12992
12993/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12994FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12995{
12996 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12997 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12998}
12999
13000
13001/* Opcode 0xf3 0x0f 0xd5 - invalid */
13002/* Opcode 0xf2 0x0f 0xd5 - invalid */
13003
13004/* Opcode 0x0f 0xd6 - invalid */
13005
13006/**
13007 * @opcode 0xd6
13008 * @oppfx 0x66
13009 * @opcpuid sse2
13010 * @opgroup og_sse2_pcksclr_datamove
13011 * @opxcpttype none
13012 * @optest op1=-1 op2=2 -> op1=2
13013 * @optest op1=0 op2=-42 -> op1=-42
13014 */
13015FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13016{
13017 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13019 if (IEM_IS_MODRM_REG_MODE(bRm))
13020 {
13021 /*
13022 * Register, register.
13023 */
13024 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13026 IEM_MC_LOCAL(uint64_t, uSrc);
13027
13028 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13029 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13030
13031 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13032 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13033
13034 IEM_MC_ADVANCE_RIP_AND_FINISH();
13035 IEM_MC_END();
13036 }
13037 else
13038 {
13039 /*
13040 * Memory, register.
13041 */
13042 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13043 IEM_MC_LOCAL(uint64_t, uSrc);
13044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13045
13046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13048 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13049 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13050
13051 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13052 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13053
13054 IEM_MC_ADVANCE_RIP_AND_FINISH();
13055 IEM_MC_END();
13056 }
13057}
13058
13059
13060/**
13061 * @opcode 0xd6
13062 * @opcodesub 11 mr/reg
13063 * @oppfx f3
13064 * @opcpuid sse2
13065 * @opgroup og_sse2_simdint_datamove
13066 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13067 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13068 */
13069FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13070{
13071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13072 if (IEM_IS_MODRM_REG_MODE(bRm))
13073 {
13074 /*
13075 * Register, register.
13076 */
13077 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13078 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER);
13079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13080 IEM_MC_LOCAL(uint64_t, uSrc);
13081
13082 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13083 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13084 IEM_MC_FPU_TO_MMX_MODE();
13085
13086 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13087 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13088
13089 IEM_MC_ADVANCE_RIP_AND_FINISH();
13090 IEM_MC_END();
13091 }
13092
13093 /**
13094 * @opdone
13095 * @opmnemonic udf30fd6mem
13096 * @opcode 0xd6
13097 * @opcodesub !11 mr/reg
13098 * @oppfx f3
13099 * @opunused intel-modrm
13100 * @opcpuid sse
13101 * @optest ->
13102 */
13103 else
13104 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13105}
13106
13107
13108/**
13109 * @opcode 0xd6
13110 * @opcodesub 11 mr/reg
13111 * @oppfx f2
13112 * @opcpuid sse2
13113 * @opgroup og_sse2_simdint_datamove
13114 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13115 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13116 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13117 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13118 * @optest op1=-42 op2=0xfedcba9876543210
13119 * -> op1=0xfedcba9876543210 ftw=0xff
13120 */
13121FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13122{
13123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13124 if (IEM_IS_MODRM_REG_MODE(bRm))
13125 {
13126 /*
13127 * Register, register.
13128 */
13129 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13130 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER);
13131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13132 IEM_MC_LOCAL(uint64_t, uSrc);
13133
13134 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13135 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13136 IEM_MC_FPU_TO_MMX_MODE();
13137
13138 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13139 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13140
13141 IEM_MC_ADVANCE_RIP_AND_FINISH();
13142 IEM_MC_END();
13143 }
13144
13145 /**
13146 * @opdone
13147 * @opmnemonic udf20fd6mem
13148 * @opcode 0xd6
13149 * @opcodesub !11 mr/reg
13150 * @oppfx f2
13151 * @opunused intel-modrm
13152 * @opcpuid sse
13153 * @optest ->
13154 */
13155 else
13156 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13157}
13158
13159
13160/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13161FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13162{
13163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13164 /* Docs say register only. */
13165 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13166 {
13167 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13168 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
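 /* Gathers the most significant bit of each source byte into the low eight
    bits of the destination general register. */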
13169 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
13170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13171 IEM_MC_ARG(uint64_t *, puDst, 0);
13172 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13173 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13174 IEM_MC_PREPARE_FPU_USAGE();
13175 IEM_MC_FPU_TO_MMX_MODE();
13176
13177 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13178 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13179 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13180
13181 IEM_MC_ADVANCE_RIP_AND_FINISH();
13182 IEM_MC_END();
13183 }
13184 else
13185 IEMOP_RAISE_INVALID_OPCODE_RET();
13186}
13187
13188
13189/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13190FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13191{
13192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13193 /* Docs say register only. */
13194 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13195 {
13196 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13197 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13198 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
13199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13200 IEM_MC_ARG(uint64_t *, puDst, 0);
13201 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13203 IEM_MC_PREPARE_SSE_USAGE();
13204 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13205 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13206 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13207 IEM_MC_ADVANCE_RIP_AND_FINISH();
13208 IEM_MC_END();
13209 }
13210 else
13211 IEMOP_RAISE_INVALID_OPCODE_RET();
13212}
13213
13214
13215/* Opcode 0xf3 0x0f 0xd7 - invalid */
13216/* Opcode 0xf2 0x0f 0xd7 - invalid */
13217
13218
13219/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13220FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13221{
13222 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13223 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13224}
13225
13226
13227/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13228FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13229{
13230 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13231 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13232}
13233
13234
13235/* Opcode 0xf3 0x0f 0xd8 - invalid */
13236/* Opcode 0xf2 0x0f 0xd8 - invalid */
13237
13238/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13239FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13240{
13241 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13242 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13243}
13244
13245
13246/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13247FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13248{
13249 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13250 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13251}
13252
13253
13254/* Opcode 0xf3 0x0f 0xd9 - invalid */
13255/* Opcode 0xf2 0x0f 0xd9 - invalid */
13256
13257/** Opcode 0x0f 0xda - pminub Pq, Qq */
13258FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13259{
13260 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13261 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13262}
13263
13264
13265/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13266FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13267{
13268 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13269 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13270}
13271
13272/* Opcode 0xf3 0x0f 0xda - invalid */
13273/* Opcode 0xf2 0x0f 0xda - invalid */
13274
13275/** Opcode 0x0f 0xdb - pand Pq, Qq */
13276FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13277{
13278 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13279 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13280}
13281
13282
13283/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13284FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13285{
13286 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13287 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13288}
13289
13290
13291/* Opcode 0xf3 0x0f 0xdb - invalid */
13292/* Opcode 0xf2 0x0f 0xdb - invalid */
13293
13294/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13295FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13296{
13297 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13298 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13299}
13300
13301
13302/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13303FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13304{
13305 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13306 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13307}
13308
13309
13310/* Opcode 0xf3 0x0f 0xdc - invalid */
13311/* Opcode 0xf2 0x0f 0xdc - invalid */
13312
13313/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13314FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13315{
13316 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13317 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13318}
13319
13320
13321/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13322FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13323{
13324 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13325 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13326}
13327
13328
13329/* Opcode 0xf3 0x0f 0xdd - invalid */
13330/* Opcode 0xf2 0x0f 0xdd - invalid */
13331
13332/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13333FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13334{
13335 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13336 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13337}
13338
13339
13340/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13341FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13342{
13343 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13344 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13345}
13346
13347/* Opcode 0xf3 0x0f 0xde - invalid */
13348/* Opcode 0xf2 0x0f 0xde - invalid */
13349
13350
13351/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13352FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13353{
13354 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13355 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13356}
13357
13358
13359/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13360FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13361{
13362 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13363 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13364}
13365
13366
13367/* Opcode 0xf3 0x0f 0xdf - invalid */
13368/* Opcode 0xf2 0x0f 0xdf - invalid */
13369
13370/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13371FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13372{
13373 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13374 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13375}
13376
13377
13378/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13379FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13380{
13381 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13382 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13383}
13384
13385
13386/* Opcode 0xf3 0x0f 0xe0 - invalid */
13387/* Opcode 0xf2 0x0f 0xe0 - invalid */
13388
13389/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13390FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13391{
13392 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13393 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13394}
13395
13396
13397/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13398FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13399{
13400 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13401 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13402}
13403
13404
13405/* Opcode 0xf3 0x0f 0xe1 - invalid */
13406/* Opcode 0xf2 0x0f 0xe1 - invalid */
13407
13408/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13409FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13410{
13411 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13412 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13413}
13414
13415
13416/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13417FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13418{
13419 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13420 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13421}
13422
13423
13424/* Opcode 0xf3 0x0f 0xe2 - invalid */
13425/* Opcode 0xf2 0x0f 0xe2 - invalid */
13426
13427/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13428FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13429{
13430 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13431 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13432}
13433
13434
13435/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13436FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13437{
13438 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13439 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13440}
13441
13442
13443/* Opcode 0xf3 0x0f 0xe3 - invalid */
13444/* Opcode 0xf2 0x0f 0xe3 - invalid */
13445
13446/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13447FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13448{
13449 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13450 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13451}
13452
13453
13454/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13455FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13456{
13457 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13458 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13459}
13460
13461
13462/* Opcode 0xf3 0x0f 0xe4 - invalid */
13463/* Opcode 0xf2 0x0f 0xe4 - invalid */
13464
13465/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13466FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13467{
13468 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13469 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13470}
13471
13472
13473/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13474FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13475{
13476 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13477 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13478}
13479
13480
13481/* Opcode 0xf3 0x0f 0xe5 - invalid */
13482/* Opcode 0xf2 0x0f 0xe5 - invalid */
13483/* Opcode 0x0f 0xe6 - invalid */
13484
13485
13486/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13487FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13488{
13489 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13490 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13491}
13492
13493
13494/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13495FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13496{
13497 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13498 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13499}
13500
13501
13502/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13503FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13504{
13505 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13506 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13507}
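/*
 * Illustrative note on the three conversion forms above: cvttpd2dq always
 * truncates towards zero, cvtpd2dq rounds according to MXCSR.RC, and cvtdq2pd
 * is the exact widening conversion. A minimal guest-side sketch using the
 * standard SSE2 intrinsics (CvtDemo is a hypothetical name; results assume
 * the default round-to-nearest-even MXCSR mode):
 *
 * @code
 * #include <emmintrin.h>
 * static void CvtDemo(void)
 * {
 *     __m128d const vd = _mm_set_pd(-1.5, 2.5); // lo=2.5, hi=-1.5
 *     __m128i const iT = _mm_cvttpd_epi32(vd);  // { 2, -1, 0, 0 } - truncated
 *     __m128i const iR = _mm_cvtpd_epi32(vd);   // { 2, -2, 0, 0 } - rounded to nearest-even
 *     (void)iT; (void)iR;
 * }
 * @endcode
 */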
13508
13509
13510/**
13511 * @opcode 0xe7
13512 * @opcodesub !11 mr/reg
13513 * @oppfx none
13514 * @opcpuid sse
13515 * @opgroup og_sse1_cachect
13516 * @opxcpttype none
13517 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13518 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13519 */
13520FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13521{
13522 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13524 if (IEM_IS_MODRM_MEM_MODE(bRm))
13525 {
13526 /* Register, memory. */
13527 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13528 IEM_MC_LOCAL(uint64_t, uSrc);
13529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13530
13531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13534 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13535 IEM_MC_FPU_TO_MMX_MODE();
13536
13537 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13538 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13539
13540 IEM_MC_ADVANCE_RIP_AND_FINISH();
13541 IEM_MC_END();
13542 }
13543 /**
13544 * @opdone
13545 * @opmnemonic ud0fe7reg
13546 * @opcode 0xe7
13547 * @opcodesub 11 mr/reg
13548 * @oppfx none
13549 * @opunused immediate
13550 * @opcpuid sse
13551 * @optest ->
13552 */
13553 else
13554 IEMOP_RAISE_INVALID_OPCODE_RET();
13555}
13556
13557/**
13558 * @opcode 0xe7
13559 * @opcodesub !11 mr/reg
13560 * @oppfx 0x66
13561 * @opcpuid sse2
13562 * @opgroup og_sse2_cachect
13563 * @opxcpttype 1
13564 * @optest op1=-1 op2=2 -> op1=2
13565 * @optest op1=0 op2=-42 -> op1=-42
13566 */
13567FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13568{
13569 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13571 if (IEM_IS_MODRM_MEM_MODE(bRm))
13572 {
13573 /* Register, memory. */
13574 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13575 IEM_MC_LOCAL(RTUINT128U, uSrc);
13576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13577
13578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13580 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13581 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13582
13583 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13584 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13585
13586 IEM_MC_ADVANCE_RIP_AND_FINISH();
13587 IEM_MC_END();
13588 }
13589
13590 /**
13591 * @opdone
13592 * @opmnemonic ud660fe7reg
13593 * @opcode 0xe7
13594 * @opcodesub 11 mr/reg
13595 * @oppfx 0x66
13596 * @opunused immediate
13597 * @opcpuid sse2
13598 * @optest ->
13599 */
13600 else
13601 IEMOP_RAISE_INVALID_OPCODE_RET();
13602}
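/*
 * Note: movntq (above) and movntdq are non-temporal stores that bypass the
 * cache hierarchy via write combining. The SSE2 form enforces 16-byte
 * alignment through IEM_MC_STORE_MEM_U128_ALIGN_SSE (misaligned accesses
 * fault), whereas the MMX form has no alignment restriction. Illustrative
 * guest-side sketch with the standard intrinsics (CopyNoCachePollution is a
 * hypothetical name; both pointers assumed 16-byte aligned):
 *
 * @code
 * #include <stddef.h>
 * #include <emmintrin.h>
 * static void CopyNoCachePollution(__m128i *pDst, const __m128i *pSrc, size_t cVecs)
 * {
 *     for (size_t i = 0; i < cVecs; i++)
 *         _mm_stream_si128(&pDst[i], _mm_load_si128(&pSrc[i])); // movntdq
 *     _mm_sfence(); // order the WC stores before subsequent stores
 * }
 * @endcode
 */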
13603
13604/* Opcode 0xf3 0x0f 0xe7 - invalid */
13605/* Opcode 0xf2 0x0f 0xe7 - invalid */
13606
13607
13608/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13609FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13610{
13611 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13612 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13613}
13614
13615
13616/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13617FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13618{
13619 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13620 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13621}
13622
13623
13624/* Opcode 0xf3 0x0f 0xe8 - invalid */
13625/* Opcode 0xf2 0x0f 0xe8 - invalid */
13626
13627/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13628FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13629{
13630 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13631 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13632}
13633
13634
13635/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13636FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13637{
13638 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13639 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13640}
13641
13642
13643/* Opcode 0xf3 0x0f 0xe9 - invalid */
13644/* Opcode 0xf2 0x0f 0xe9 - invalid */
13645
13646
13647/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13648FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13649{
13650 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13651 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13652}
13653
13654
13655/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13656FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13657{
13658 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13659 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13660}
13661
13662
13663/* Opcode 0xf3 0x0f 0xea - invalid */
13664/* Opcode 0xf2 0x0f 0xea - invalid */
13665
13666
13667/** Opcode 0x0f 0xeb - por Pq, Qq */
13668FNIEMOP_DEF(iemOp_por_Pq_Qq)
13669{
13670 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13671 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13672}
13673
13674
13675/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13676FNIEMOP_DEF(iemOp_por_Vx_Wx)
13677{
13678 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13679 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13680}
13681
13682
13683/* Opcode 0xf3 0x0f 0xeb - invalid */
13684/* Opcode 0xf2 0x0f 0xeb - invalid */
13685
13686/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13687FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13688{
13689 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13690 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13691}
13692
13693
13694/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13695FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13696{
13697 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13698 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13699}
13700
13701
13702/* Opcode 0xf3 0x0f 0xec - invalid */
13703/* Opcode 0xf2 0x0f 0xec - invalid */
13704
13705/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13706FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13707{
13708 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13709 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13710}
13711
13712
13713/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13714FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13715{
13716 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13717 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13718}
13719
13720
13721/* Opcode 0xf3 0x0f 0xed - invalid */
13722/* Opcode 0xf2 0x0f 0xed - invalid */
13723
13724
13725/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13726FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13727{
13728 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13729 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13730}
13731
13732
13733/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13734FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13735{
13736 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13737 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13738}
13739
13740
13741/* Opcode 0xf3 0x0f 0xee - invalid */
13742/* Opcode 0xf2 0x0f 0xee - invalid */
13743
13744
13745/** Opcode 0x0f 0xef - pxor Pq, Qq */
13746FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13747{
13748 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13749 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13750}
13751
13752
13753/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13754FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13755{
13756 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13757 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13758}
13759
13760
13761/* Opcode 0xf3 0x0f 0xef - invalid */
13762/* Opcode 0xf2 0x0f 0xef - invalid */
13763
13764/* Opcode 0x0f 0xf0 - invalid */
13765/* Opcode 0x66 0x0f 0xf0 - invalid */
13766
13767
13768/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13769FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13770{
13771 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13773 if (IEM_IS_MODRM_REG_MODE(bRm))
13774 {
13775 /*
13776 * Register, register - invalid; lddqu only takes a memory source (raises \#UD).
13777 */
13778 IEMOP_RAISE_INVALID_OPCODE_RET();
13779 }
13780 else
13781 {
13782 /*
13783 * Register, memory.
13784 */
13785 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13786 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13788
13789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13791 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13792 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13793 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13794 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13795
13796 IEM_MC_ADVANCE_RIP_AND_FINISH();
13797 IEM_MC_END();
13798 }
13799}
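/* Note: unlike movdqa, lddqu never faults on misalignment, hence the plain
 * IEM_MC_FETCH_MEM_U128 above instead of an ALIGN_SSE variant; on some
 * implementations it may architecturally read more bytes than it returns.
 * The matching guest intrinsic is the SSE3 _mm_lddqu_si128() (pmmintrin.h). */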
13800
13801
13802/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13803FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13804{
13805 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13806 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13807}
13808
13809
13810/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13811FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13812{
13813 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13814 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13815}
13816
13817
13818/* Opcode 0xf2 0x0f 0xf1 - invalid */
13819
13820/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13821FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13822{
13823 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13824 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13825}
13826
13827
13828/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13829FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13830{
13831 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13832 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13833}
13834
13835
13836/* Opcode 0xf2 0x0f 0xf2 - invalid */
13837
13838/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13839FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13840{
13841 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13842 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13843}
13844
13845
13846/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13847FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13848{
13849 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13850 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13851}
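/* Note: for the whole psllX/psrlX/psraX register family the shift count is
 * taken from the low 64 bits of the source operand; a count larger than the
 * element width zeroes the result for the logical shifts, while psraX fills
 * each element with copies of its sign bit. */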
13852
13853/* Opcode 0xf2 0x0f 0xf3 - invalid */
13854
13855/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13856FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13857{
13858 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13859 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13860}
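/* Note: like psubq further down, pmuludq only appeared with SSE2, so the MMX
 * form above uses the SSE2-gated MMX worker as well. */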
13861
13862
13863 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13864FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13865{
13866 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13867 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13868}
13869
13870
13871/* Opcode 0xf2 0x0f 0xf4 - invalid */
13872
13873/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13874FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13875{
13876 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13877 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13878}
13879
13880
13881/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13882FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13883{
13884 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13885 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13886}
13887
13888/* Opcode 0xf2 0x0f 0xf5 - invalid */
13889
13890/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13891FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13892{
13893 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13894 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13895}
13896
13897
13898/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13899FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13900{
13901 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13902 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13903}
13904
13905
13906/* Opcode 0xf2 0x0f 0xf6 - invalid */
13907
13908/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13909FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13910/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13911FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
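/* Note: the maskmovq / maskmovdqu byte-masked stores (to [ds:rDI/eDI]) are
 * still FNIEMOP_STUB placeholders here, i.e. not implemented yet. */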
13912/* Opcode 0xf2 0x0f 0xf7 - invalid */
13913
13914
13915/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13916FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13917{
13918 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13919 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13920}
13921
13922
13923/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13924FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13925{
13926 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13927 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13928}
13929
13930
13931/* Opcode 0xf2 0x0f 0xf8 - invalid */
13932
13933
13934/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13935FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13936{
13937 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13938 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13939}
13940
13941
13942/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13943FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13944{
13945 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13946 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13947}
13948
13949
13950/* Opcode 0xf2 0x0f 0xf9 - invalid */
13951
13952
13953/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13954FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13955{
13956 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13957 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13958}
13959
13960
13961/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13962FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13963{
13964 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13965 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13966}
13967
13968
13969/* Opcode 0xf2 0x0f 0xfa - invalid */
13970
13971
13972/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13973FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13974{
13975 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13976 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13977}
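/* Note: psubq is an SSE2 addition even in this 64-bit MMX form, hence the
 * _Sse2 worker above, which gates on CPUID.SSE2 rather than plain CPUID.MMX. */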
13978
13979
13980/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13981FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13982{
13983 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13984 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13985}
13986
13987
13988/* Opcode 0xf2 0x0f 0xfb - invalid */
13989
13990
13991/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13992FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13993{
13994 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13995 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13996}
13997
13998
13999/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14000FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14001{
14002 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14003 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
14004}
14005
14006
14007/* Opcode 0xf2 0x0f 0xfc - invalid */
14008
14009
14010/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14011FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14012{
14013 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14014 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
14015}
14016
14017
14018/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14019FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14020{
14021 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14022 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
14023}
14024
14025
14026/* Opcode 0xf2 0x0f 0xfd - invalid */
14027
14028
14029/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14030FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14031{
14032 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
14034}
14035
14036
14037 /** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14038FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14039{
14040 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14041 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
14042}
14043
14044
14045/* Opcode 0xf2 0x0f 0xfe - invalid */
14046
14047
14048/** Opcode **** 0x0f 0xff - UD0 */
14049FNIEMOP_DEF(iemOp_ud0)
14050{
14051 IEMOP_MNEMONIC(ud0, "ud0");
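 /* Intel CPUs consume a ModR/M byte (and any SIB/displacement bytes it
    implies) when decoding ud0, whereas AMD CPUs treat the opcode as
    complete; the vendor check below replicates that difference. */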
14052 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14053 {
14054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14055 if (IEM_IS_MODRM_MEM_MODE(bRm))
14056 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14057 }
14058 IEMOP_HLP_DONE_DECODING();
14059 IEMOP_RAISE_INVALID_OPCODE_RET();
14060}
14061
14062
14063
14064/**
14065 * Two byte opcode map, first byte 0x0f.
14066 *
14067 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14068 * check if it needs updating as well when making changes.
14069 */
14070const PFNIEMOP g_apfnTwoByteMap[] =
14071{
14072 /* no prefix, 066h prefix f3h prefix, f2h prefix */
14073 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14074 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14075 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14076 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14077 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14078 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14079 /* 0x06 */ IEMOP_X4(iemOp_clts),
14080 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14081 /* 0x08 */ IEMOP_X4(iemOp_invd),
14082 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14083 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14084 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14085 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14086 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14087 /* 0x0e */ IEMOP_X4(iemOp_femms),
14088 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14089
14090 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14091 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14092 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14093 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14094 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14096 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14097 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14098 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14099 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14100 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14101 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14102 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14103 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14104 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14105 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14106
14107 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14108 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14109 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14110 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14111 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14112 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14113 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14114 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14115 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14117 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14118 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14120 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14121 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123
14124 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14125 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14126 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14127 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14128 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14129 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14130 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14131 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14132 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14133 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14134 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14135 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14136 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14137 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14138 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14139 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14140
14141 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14142 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14143 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14144 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14145 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14146 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14147 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14148 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14149 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14150 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14151 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14152 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14153 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14154 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14155 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14156 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14157
14158 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14159 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14160 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14161 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14162 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14163 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14164 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14165 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14166 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14167 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14168 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14169 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14170 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14171 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14172 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14173 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14174
14175 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14176 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14177 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14178 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14179 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14180 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14181 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14188 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14191
14192 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14193 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14194 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14195 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14196 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14197 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14198 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14199 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14200
14201 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14202 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14203 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14204 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14205 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14206 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14207 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14208 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14209
14210 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14211 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14212 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14213 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14214 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14215 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14216 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14217 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14218 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14219 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14220 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14221 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14222 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14223 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14224 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14225 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14226
14227 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14228 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14229 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14230 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14231 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14232 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14233 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14234 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14235 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14236 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14237 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14238 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14239 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14240 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14241 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14242 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14243
14244 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14245 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14246 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14247 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14248 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14249 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14250 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14251 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14252 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14253 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14254 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14255 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14256 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14257 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14258 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14259 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14260
14261 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14262 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14263 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14264 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14265 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14266 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14267 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14268 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14269 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14270 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14271 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14272 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14273 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14274 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14275 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14276 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14277
14278 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14279 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14280 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14281 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14282 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14283 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14284 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14285 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14286 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14287 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14288 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14289 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14290 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14291 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14292 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14293 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14294
14295 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14296 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14297 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14298 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14299 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14300 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14301 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14302 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311
14312 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14319 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328
14329 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14330 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14335 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14342 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14343 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xff */ IEMOP_X4(iemOp_ud0),
14345};
14346AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
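/*
 * A minimal sketch of how this table is meant to be indexed (assuming the
 * usual IEM dispatch convention; bOpcode and idxPrefix are hypothetical
 * local names):
 *
 * @code
 * // idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2
 * PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 * @endcode
 *
 * Hence the assertion above: 256 opcodes x 4 prefix columns = 1024 entries.
 */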
14347
14348/** @} */
14349