VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@84826

Last change on this file since 84826 was 84477, checked in by vboxsync, 5 years ago

IEM: Implemented dummy MOV to/from TR. Avoids crashes in OS/2 KDB and WDEB386 when emulating a 386.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 342.9 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 84477 2020-05-24 18:18:55Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2020 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
38 }
39
40 /* Ignore operand size here, memory refs are always 16-bit. */
41 IEM_MC_BEGIN(2, 0);
42 IEM_MC_ARG(uint16_t, iEffSeg, 0);
43 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
44 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
45 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
46 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
47 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
48 IEM_MC_END();
49 return VINF_SUCCESS;
50}
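/*
 * Editor's sketch (not part of the upstream file): the mod test above picks
 * out the register form of the ModRM byte. Assuming the standard x86 layout
 * (mod = bits 7:6, reg = bits 5:3, rm = bits 2:0), mod == 3 means a register
 * operand and anything else a memory operand:
 *
 *     static bool iemExampleIsRegisterForm(uint8_t bRm)  // hypothetical helper
 *     {
 *         // same test as (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
 *         return (bRm & 0xc0) == 0xc0;
 *     }
 */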
51
52
53/** Opcode 0x0f 0x00 /1. */
54FNIEMOPRM_DEF(iemOp_Grp6_str)
55{
56 IEMOP_MNEMONIC(str, "str Rv/Mw");
57 IEMOP_HLP_MIN_286();
58 IEMOP_HLP_NO_REAL_OR_V86_MODE();
59
60
61 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
62 {
63 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
64 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
65 }
66
67 /* Ignore operand size here, memory refs are always 16-bit. */
68 IEM_MC_BEGIN(2, 0);
69 IEM_MC_ARG(uint16_t, iEffSeg, 0);
70 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
71 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
72 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
73 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
74 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
75 IEM_MC_END();
76 return VINF_SUCCESS;
77}
78
79
80/** Opcode 0x0f 0x00 /2. */
81FNIEMOPRM_DEF(iemOp_Grp6_lldt)
82{
83 IEMOP_MNEMONIC(lldt, "lldt Ew");
84 IEMOP_HLP_MIN_286();
85 IEMOP_HLP_NO_REAL_OR_V86_MODE();
86
87 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
88 {
89 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
90 IEM_MC_BEGIN(1, 0);
91 IEM_MC_ARG(uint16_t, u16Sel, 0);
92 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
93 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
94 IEM_MC_END();
95 }
96 else
97 {
98 IEM_MC_BEGIN(1, 1);
99 IEM_MC_ARG(uint16_t, u16Sel, 0);
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
102 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
103 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
104 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
105 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
106 IEM_MC_END();
107 }
108 return VINF_SUCCESS;
109}
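/*
 * Note (illustrative, assuming the usual IEM microcode conventions):
 * IEM_MC_BEGIN takes the argument count and the local-variable count, which
 * is why the register form above opens with IEM_MC_BEGIN(1, 0) -- one
 * argument (u16Sel), no locals -- while the memory form needs
 * IEM_MC_BEGIN(1, 1) for the extra GCPtrEffSrc local. Skeleton:
 *
 *     IEM_MC_BEGIN(1, 1);                  // 1 argument, 1 local
 *     IEM_MC_ARG(uint16_t, u16Sel, 0);     // argument slot 0
 *     IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);  // scratch local
 *     ...                                  // fetch and C-impl call
 *     IEM_MC_END();
 */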
110
111
112/** Opcode 0x0f 0x00 /3. */
113FNIEMOPRM_DEF(iemOp_Grp6_ltr)
114{
115 IEMOP_MNEMONIC(ltr, "ltr Ew");
116 IEMOP_HLP_MIN_286();
117 IEMOP_HLP_NO_REAL_OR_V86_MODE();
118
119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
120 {
121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
122 IEM_MC_BEGIN(1, 0);
123 IEM_MC_ARG(uint16_t, u16Sel, 0);
124 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
125 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
126 IEM_MC_END();
127 }
128 else
129 {
130 IEM_MC_BEGIN(1, 1);
131 IEM_MC_ARG(uint16_t, u16Sel, 0);
132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
135 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
136 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
138 IEM_MC_END();
139 }
140 return VINF_SUCCESS;
141}
142
143
144/** Common worker for VERR and VERW (0x0f 0x00 /4 and /5). */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
174
175
176/** Opcode 0x0f 0x00 /4. */
177FNIEMOPRM_DEF(iemOp_Grp6_verr)
178{
179 IEMOP_MNEMONIC(verr, "verr Ew");
180 IEMOP_HLP_MIN_286();
181 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
182}
183
184
185/** Opcode 0x0f 0x00 /5. */
186FNIEMOPRM_DEF(iemOp_Grp6_verw)
187{
188 IEMOP_MNEMONIC(verw, "verw Ew");
189 IEMOP_HLP_MIN_286();
190 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
191}
192
193
194/**
195 * Group 6 jump table.
196 */
197IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
198{
199 iemOp_Grp6_sldt,
200 iemOp_Grp6_str,
201 iemOp_Grp6_lldt,
202 iemOp_Grp6_ltr,
203 iemOp_Grp6_verr,
204 iemOp_Grp6_verw,
205 iemOp_InvalidWithRM,
206 iemOp_InvalidWithRM
207};
208
209/** Opcode 0x0f 0x00. */
210FNIEMOP_DEF(iemOp_Grp6)
211{
212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
213 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
214}
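/*
 * Worked example (editor's addition): the dispatcher above indexes
 * g_apfnGroup6 with the ModRM reg field. For 0f 00 d8 (ltr ax), bRm = 0xd8:
 *
 *     (0xd8 >> 3) & 7 == 3   ->   g_apfnGroup6[3] == iemOp_Grp6_ltr
 *
 * assuming X86_MODRM_REG_SHIFT == 3 and X86_MODRM_REG_SMASK == 7.
 */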
215
216
217/** Opcode 0x0f 0x01 /0. */
218FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
219{
220 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_64BIT_OP_SIZE();
223 IEM_MC_BEGIN(2, 1);
224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
225 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
229 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
230 IEM_MC_END();
231 return VINF_SUCCESS;
232}
233
234
235/** Opcode 0x0f 0x01 /0. */
236FNIEMOP_DEF(iemOp_Grp7_vmcall)
237{
238 IEMOP_MNEMONIC(vmcall, "vmcall");
239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
240
241 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
242 want all hypercalls regardless of the instruction used; if a
243 hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
244 (NEM/win makes ASSUMPTIONS about this behavior.) */
245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
246}
247
248
249/** Opcode 0x0f 0x01 /0. */
250#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
251FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
252{
253 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
254 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
255 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
256 IEMOP_HLP_DONE_DECODING();
257 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
258}
259#else
260FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
261{
262 IEMOP_BITCH_ABOUT_STUB();
263 return IEMOP_RAISE_INVALID_OPCODE();
264}
265#endif
266
267
268/** Opcode 0x0f 0x01 /0. */
269#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
270FNIEMOP_DEF(iemOp_Grp7_vmresume)
271{
272 IEMOP_MNEMONIC(vmresume, "vmresume");
273 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
274 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
275 IEMOP_HLP_DONE_DECODING();
276 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
277}
278#else
279FNIEMOP_DEF(iemOp_Grp7_vmresume)
280{
281 IEMOP_BITCH_ABOUT_STUB();
282 return IEMOP_RAISE_INVALID_OPCODE();
283}
284#endif
285
286
287/** Opcode 0x0f 0x01 /0. */
288#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
289FNIEMOP_DEF(iemOp_Grp7_vmxoff)
290{
291 IEMOP_MNEMONIC(vmxoff, "vmxoff");
292 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
293 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
294 IEMOP_HLP_DONE_DECODING();
295 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
296}
297#else
298FNIEMOP_DEF(iemOp_Grp7_vmxoff)
299{
300 IEMOP_BITCH_ABOUT_STUB();
301 return IEMOP_RAISE_INVALID_OPCODE();
302}
303#endif
304
305
306/** Opcode 0x0f 0x01 /1. */
307FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
308{
309 IEMOP_MNEMONIC(sidt, "sidt Ms");
310 IEMOP_HLP_MIN_286();
311 IEMOP_HLP_64BIT_OP_SIZE();
312 IEM_MC_BEGIN(2, 1);
313 IEM_MC_ARG(uint8_t, iEffSeg, 0);
314 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
317 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
318 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
319 IEM_MC_END();
320 return VINF_SUCCESS;
321}
322
323
324/** Opcode 0x0f 0x01 /1. */
325FNIEMOP_DEF(iemOp_Grp7_monitor)
326{
327 IEMOP_MNEMONIC(monitor, "monitor");
328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
329 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
330}
331
332
333/** Opcode 0x0f 0x01 /1. */
334FNIEMOP_DEF(iemOp_Grp7_mwait)
335{
336 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
338 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
339}
340
341
342/** Opcode 0x0f 0x01 /2. */
343FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
344{
345 IEMOP_MNEMONIC(lgdt, "lgdt");
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(3, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
353 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
354 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
355 IEM_MC_END();
356 return VINF_SUCCESS;
357}
358
359
360/** Opcode 0x0f 0x01 0xd0. */
361FNIEMOP_DEF(iemOp_Grp7_xgetbv)
362{
363 IEMOP_MNEMONIC(xgetbv, "xgetbv");
364 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
365 {
366 /** @todo r=ramshankar: We should use
367 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
368 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
369 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
370 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
371 }
372 return IEMOP_RAISE_INVALID_OPCODE();
373}
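/*
 * Rough model of what the deferred iemCImpl_xgetbv has to produce (per the
 * SDM, not a quote of the VBox implementation): the XCR selected by ECX is
 * returned split across EDX:EAX.
 *
 *     uint64_t const uXcr = ...;                      // e.g. XCR0 for ECX=0
 *     uint32_t const uEax = (uint32_t)uXcr;           // low dword
 *     uint32_t const uEdx = (uint32_t)(uXcr >> 32);   // high dword
 */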
374
375
376/** Opcode 0x0f 0x01 0xd1. */
377FNIEMOP_DEF(iemOp_Grp7_xsetbv)
378{
379 IEMOP_MNEMONIC(xsetbv, "xsetbv");
380 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
381 {
382 /** @todo r=ramshankar: We should use
383 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
384 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
385 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
386 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
387 }
388 return IEMOP_RAISE_INVALID_OPCODE();
389}
390
391
392/** Opcode 0x0f 0x01 /3. */
393FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
394{
395 IEMOP_MNEMONIC(lidt, "lidt");
396 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
397 ? IEMMODE_64BIT
398 : pVCpu->iem.s.enmEffOpSize;
399 IEM_MC_BEGIN(3, 1);
400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
401 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
406 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
407 IEM_MC_END();
408 return VINF_SUCCESS;
409}
410
411
412/** Opcode 0x0f 0x01 0xd8. */
413#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
414FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
415{
416 IEMOP_MNEMONIC(vmrun, "vmrun");
417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
418 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
419}
420#else
421FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
422#endif
423
424/** Opcode 0x0f 0x01 0xd9. */
425FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
426{
427 IEMOP_MNEMONIC(vmmcall, "vmmcall");
428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
429
430 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
431 want all hypercalls regardless of the instruction used; if a
432 hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
433 (NEM/win makes ASSUMPTIONS about this behavior.) */
434 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
435}
436
437/** Opcode 0x0f 0x01 0xda. */
438#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
439FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
440{
441 IEMOP_MNEMONIC(vmload, "vmload");
442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
443 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
444}
445#else
446FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
447#endif
448
449
450/** Opcode 0x0f 0x01 0xdb. */
451#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
452FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
453{
454 IEMOP_MNEMONIC(vmsave, "vmsave");
455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
456 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
457}
458#else
459FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
460#endif
461
462
463/** Opcode 0x0f 0x01 0xdc. */
464#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
465FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
466{
467 IEMOP_MNEMONIC(stgi, "stgi");
468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
470}
471#else
472FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
473#endif
474
475
476/** Opcode 0x0f 0x01 0xdd. */
477#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
478FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
479{
480 IEMOP_MNEMONIC(clgi, "clgi");
481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
482 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
483}
484#else
485FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
486#endif
487
488
489/** Opcode 0x0f 0x01 0xdf. */
490#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
491FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
492{
493 IEMOP_MNEMONIC(invlpga, "invlpga");
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
495 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
496}
497#else
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
499#endif
500
501
502/** Opcode 0x0f 0x01 0xde. */
503#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
504FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
505{
506 IEMOP_MNEMONIC(skinit, "skinit");
507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
508 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
509}
510#else
511FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
512#endif
513
514
515/** Opcode 0x0f 0x01 /4. */
516FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
517{
518 IEMOP_MNEMONIC(smsw, "smsw");
519 IEMOP_HLP_MIN_286();
520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
521 {
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
523 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
524 }
525
526 /* Ignore operand size here, memory refs are always 16-bit. */
527 IEM_MC_BEGIN(2, 0);
528 IEM_MC_ARG(uint16_t, iEffSeg, 0);
529 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
532 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
533 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
534 IEM_MC_END();
535 return VINF_SUCCESS;
536}
537
538
539/** Opcode 0x0f 0x01 /6. */
540FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
541{
542 /* The operand size is effectively ignored, all is 16-bit and only the
543 lower 4 bits (PE, MP, EM and TS) are used. */
544 IEMOP_MNEMONIC(lmsw, "lmsw");
545 IEMOP_HLP_MIN_286();
546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
547 {
548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
549 IEM_MC_BEGIN(2, 0);
550 IEM_MC_ARG(uint16_t, u16Tmp, 0);
551 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
552 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
553 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
554 IEM_MC_END();
555 }
556 else
557 {
558 IEM_MC_BEGIN(2, 0);
559 IEM_MC_ARG(uint16_t, u16Tmp, 0);
560 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
563 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
564 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
565 IEM_MC_END();
566 }
567 return VINF_SUCCESS;
568}
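/*
 * Illustrative model of the LMSW semantics behind iemCImpl_lmsw (per the
 * SDM, simplified and ignoring virtualization intercepts): only PE, MP, EM
 * and TS are updated, and PE cannot be cleared once set.
 *
 *     uint64_t uNewCr0 = (uOldCr0 & ~UINT64_C(0xe)) | (u16Tmp & 0xe);  // MP, EM, TS
 *     uNewCr0         |= (uOldCr0 | u16Tmp) & 1;                       // PE is sticky
 */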
569
570
571/** Opcode 0x0f 0x01 /7. */
572FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
573{
574 IEMOP_MNEMONIC(invlpg, "invlpg");
575 IEMOP_HLP_MIN_486();
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEM_MC_BEGIN(1, 1);
578 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
580 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
581 IEM_MC_END();
582 return VINF_SUCCESS;
583}
584
585
586/** Opcode 0x0f 0x01 /7. */
587FNIEMOP_DEF(iemOp_Grp7_swapgs)
588{
589 IEMOP_MNEMONIC(swapgs, "swapgs");
590 IEMOP_HLP_ONLY_64BIT();
591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
592 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
593}
594
595
596/** Opcode 0x0f 0x01 /7. */
597FNIEMOP_DEF(iemOp_Grp7_rdtscp)
598{
599 IEMOP_MNEMONIC(rdtscp, "rdtscp");
600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
601 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
602}
603
604
605/**
606 * Group 7 jump table, memory variant.
607 */
608IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
609{
610 iemOp_Grp7_sgdt,
611 iemOp_Grp7_sidt,
612 iemOp_Grp7_lgdt,
613 iemOp_Grp7_lidt,
614 iemOp_Grp7_smsw,
615 iemOp_InvalidWithRM,
616 iemOp_Grp7_lmsw,
617 iemOp_Grp7_invlpg
618};
619
620
621/** Opcode 0x0f 0x01. */
622FNIEMOP_DEF(iemOp_Grp7)
623{
624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
625 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
626 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
627
628 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
629 {
630 case 0:
631 switch (bRm & X86_MODRM_RM_MASK)
632 {
633 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
634 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
635 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
636 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
637 }
638 return IEMOP_RAISE_INVALID_OPCODE();
639
640 case 1:
641 switch (bRm & X86_MODRM_RM_MASK)
642 {
643 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
644 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
645 }
646 return IEMOP_RAISE_INVALID_OPCODE();
647
648 case 2:
649 switch (bRm & X86_MODRM_RM_MASK)
650 {
651 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
652 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
653 }
654 return IEMOP_RAISE_INVALID_OPCODE();
655
656 case 3:
657 switch (bRm & X86_MODRM_RM_MASK)
658 {
659 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
660 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
661 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
662 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
663 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
664 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
665 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
666 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
668 }
669
670 case 4:
671 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
672
673 case 5:
674 return IEMOP_RAISE_INVALID_OPCODE();
675
676 case 6:
677 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
678
679 case 7:
680 switch (bRm & X86_MODRM_RM_MASK)
681 {
682 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
683 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
684 }
685 return IEMOP_RAISE_INVALID_OPCODE();
686
687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
688 }
689}
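/*
 * Worked example (editor's addition): for the register forms both the reg
 * and rm fields select the instruction. E.g. 0f 01 d9 has bRm = 0xd9, i.e.
 * mod = 3, reg = 3, rm = 1, landing on iemOp_Grp7_Amd_vmmcall above; a
 * memory form (mod != 3) with reg = 3 would instead go through
 * g_apfnGroup7Mem[3] == iemOp_Grp7_lidt.
 */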
690
691/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
692FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
693{
694 IEMOP_HLP_NO_REAL_OR_V86_MODE();
695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
696
697 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
698 {
699 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
700 switch (pVCpu->iem.s.enmEffOpSize)
701 {
702 case IEMMODE_16BIT:
703 {
704 IEM_MC_BEGIN(3, 0);
705 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
706 IEM_MC_ARG(uint16_t, u16Sel, 1);
707 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
708
709 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
710 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
711 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
712
713 IEM_MC_END();
714 return VINF_SUCCESS;
715 }
716
717 case IEMMODE_32BIT:
718 case IEMMODE_64BIT:
719 {
720 IEM_MC_BEGIN(3, 0);
721 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
722 IEM_MC_ARG(uint16_t, u16Sel, 1);
723 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
724
725 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
727 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
728
729 IEM_MC_END();
730 return VINF_SUCCESS;
731 }
732
733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
734 }
735 }
736 else
737 {
738 switch (pVCpu->iem.s.enmEffOpSize)
739 {
740 case IEMMODE_16BIT:
741 {
742 IEM_MC_BEGIN(3, 1);
743 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
744 IEM_MC_ARG(uint16_t, u16Sel, 1);
745 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
747
748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
749 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
750
751 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
752 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
753 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
754
755 IEM_MC_END();
756 return VINF_SUCCESS;
757 }
758
759 case IEMMODE_32BIT:
760 case IEMMODE_64BIT:
761 {
762 IEM_MC_BEGIN(3, 1);
763 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
764 IEM_MC_ARG(uint16_t, u16Sel, 1);
765 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
767
768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
769 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
770/** @todo testcase: make sure it's a 16-bit read. */
771
772 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
773 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
774 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
775
776 IEM_MC_END();
777 return VINF_SUCCESS;
778 }
779
780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
781 }
782 }
783}
784
785
786
787/** Opcode 0x0f 0x02. */
788FNIEMOP_DEF(iemOp_lar_Gv_Ew)
789{
790 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
791 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
792}
793
794
795/** Opcode 0x0f 0x03. */
796FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
797{
798 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
799 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
800}
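/*
 * Rough model of iemCImpl_LarLsl_u16/u64 (per the SDM, not the actual VBox
 * C-impl): if the selector references a descriptor the current privilege
 * level may inspect, the destination and ZF are set, otherwise only ZF is
 * cleared:
 *
 *     if (selector ok) {
 *         *pDst = fIsLar ? (uDescDword2 & 0x00ffff00)  // LAR: access-rights bits
 *                        : cbExpandedLimit;            // LSL: byte-granular limit
 *         EFLAGS.ZF = 1;
 *     } else {
 *         EFLAGS.ZF = 0;                               // destination unchanged
 *     }
 */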
801
802
803/** Opcode 0x0f 0x05. */
804FNIEMOP_DEF(iemOp_syscall)
805{
806 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
808 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
809}
810
811
812/** Opcode 0x0f 0x06. */
813FNIEMOP_DEF(iemOp_clts)
814{
815 IEMOP_MNEMONIC(clts, "clts");
816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
817 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
818}
819
820
821/** Opcode 0x0f 0x07. */
822FNIEMOP_DEF(iemOp_sysret)
823{
824 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
827}
828
829
830/** Opcode 0x0f 0x08. */
831FNIEMOP_DEF(iemOp_invd)
832{
833 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
834 IEMOP_HLP_MIN_486();
835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
836 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
837}
838
839
840/** Opcode 0x0f 0x09. */
841FNIEMOP_DEF(iemOp_wbinvd)
842{
843 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
844 IEMOP_HLP_MIN_486();
845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
846 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
847}
848
849
850/** Opcode 0x0f 0x0b. */
851FNIEMOP_DEF(iemOp_ud2)
852{
853 IEMOP_MNEMONIC(ud2, "ud2");
854 return IEMOP_RAISE_INVALID_OPCODE();
855}
856
857/** Opcode 0x0f 0x0d. */
858FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
859{
860 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
862 {
863 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
864 return IEMOP_RAISE_INVALID_OPCODE();
865 }
866
867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
869 {
870 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
871 return IEMOP_RAISE_INVALID_OPCODE();
872 }
873
874 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
875 {
876 case 2: /* Aliased to /0 for the time being. */
877 case 4: /* Aliased to /0 for the time being. */
878 case 5: /* Aliased to /0 for the time being. */
879 case 6: /* Aliased to /0 for the time being. */
880 case 7: /* Aliased to /0 for the time being. */
881 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
882 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
883 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
885 }
886
887 IEM_MC_BEGIN(0, 1);
888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
891 /* Currently a NOP. */
892 NOREF(GCPtrEffSrc);
893 IEM_MC_ADVANCE_RIP();
894 IEM_MC_END();
895 return VINF_SUCCESS;
896}
897
898
899/** Opcode 0x0f 0x0e. */
900FNIEMOP_DEF(iemOp_femms)
901{
902 IEMOP_MNEMONIC(femms, "femms");
903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
904
905 IEM_MC_BEGIN(0,0);
906 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
907 IEM_MC_MAYBE_RAISE_FPU_XCPT();
908 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
909 IEM_MC_FPU_FROM_MMX_MODE();
910 IEM_MC_ADVANCE_RIP();
911 IEM_MC_END();
912 return VINF_SUCCESS;
913}
914
915
916/** Opcode 0x0f 0x0f. */
917FNIEMOP_DEF(iemOp_3Dnow)
918{
919 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
920 {
921 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
922 return IEMOP_RAISE_INVALID_OPCODE();
923 }
924
925#ifdef IEM_WITH_3DNOW
926 /* This is pretty sparse, use switch instead of table. */
927 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
928 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
929#else
930 IEMOP_BITCH_ABOUT_STUB();
931 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
932#endif
933}
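/*
 * Note (illustrative): 3DNow! instructions are encoded as 0f 0f /r with a
 * trailing opcode byte *after* the operands, which is why the dispatcher
 * above fetches one more byte and hands it to iemOp_3DNowDispatcher. E.g.
 * PFADD mm1, mm2 encodes as 0f 0f ca 9e -- ModRM 0xca first, then the 0x9e
 * suffix selecting PFADD.
 */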
934
935
936/**
937 * @opcode 0x10
938 * @oppfx none
939 * @opcpuid sse
940 * @opgroup og_sse_simdfp_datamove
941 * @opxcpttype 4UA
942 * @optest op1=1 op2=2 -> op1=2
943 * @optest op1=0 op2=-22 -> op1=-22
944 */
945FNIEMOP_DEF(iemOp_movups_Vps_Wps)
946{
947 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
949 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
950 {
951 /*
952 * Register, register.
953 */
954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
955 IEM_MC_BEGIN(0, 0);
956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
957 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
958 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
959 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
960 IEM_MC_ADVANCE_RIP();
961 IEM_MC_END();
962 }
963 else
964 {
965 /*
966 * Memory, register.
967 */
968 IEM_MC_BEGIN(0, 2);
969 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
971
972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
974 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
975 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
976
977 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
978 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
979
980 IEM_MC_ADVANCE_RIP();
981 IEM_MC_END();
982 }
983 return VINF_SUCCESS;
984
985}
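/*
 * Semantics note (editor's addition, per the SDM): unlike MOVAPS, MOVUPS
 * accepts unaligned memory operands, which is why the code above uses the
 * plain IEM_MC_FETCH_MEM_U128 rather than an ..._ALIGN_SSE variant. The
 * data move itself is just a 16-byte copy:
 *
 *     memcpy(pDstXmm, &uSrc, 16);  // pDstXmm: illustrative pointer to the destination XMM register
 */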
986
987
988/**
989 * @opcode 0x10
990 * @oppfx 0x66
991 * @opcpuid sse2
992 * @opgroup og_sse2_pcksclr_datamove
993 * @opxcpttype 4UA
994 * @optest op1=1 op2=2 -> op1=2
995 * @optest op1=0 op2=-42 -> op1=-42
996 */
997FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
998{
999 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1001 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1002 {
1003 /*
1004 * Register, register.
1005 */
1006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1007 IEM_MC_BEGIN(0, 0);
1008 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1009 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1010 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1011 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1012 IEM_MC_ADVANCE_RIP();
1013 IEM_MC_END();
1014 }
1015 else
1016 {
1017 /*
1018 * Memory, register.
1019 */
1020 IEM_MC_BEGIN(0, 2);
1021 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1023
1024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1026 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1027 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1028
1029 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1030 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1031
1032 IEM_MC_ADVANCE_RIP();
1033 IEM_MC_END();
1034 }
1035 return VINF_SUCCESS;
1036}
1037
1038
1039/**
1040 * @opcode 0x10
1041 * @oppfx 0xf3
1042 * @opcpuid sse
1043 * @opgroup og_sse_simdfp_datamove
1044 * @opxcpttype 5
1045 * @optest op1=1 op2=2 -> op1=2
1046 * @optest op1=0 op2=-22 -> op1=-22
1047 */
1048FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1049{
1050 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1053 {
1054 /*
1055 * Register, register.
1056 */
1057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1058 IEM_MC_BEGIN(0, 1);
1059 IEM_MC_LOCAL(uint32_t, uSrc);
1060
1061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1062 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1063 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1064 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1065
1066 IEM_MC_ADVANCE_RIP();
1067 IEM_MC_END();
1068 }
1069 else
1070 {
1071 /*
1072 * Memory, register.
1073 */
1074 IEM_MC_BEGIN(0, 2);
1075 IEM_MC_LOCAL(uint32_t, uSrc);
1076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1077
1078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1082
1083 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1084 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1085
1086 IEM_MC_ADVANCE_RIP();
1087 IEM_MC_END();
1088 }
1089 return VINF_SUCCESS;
1090}
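/*
 * Note (illustrative, mirroring the SDM): the two branches above differ on
 * purpose. Register-to-register MOVSS merges only the low dword
 * (IEM_MC_STORE_XREG_U32, bits 127:32 preserved), while the load form
 * zero-extends through bit 127 (IEM_MC_STORE_XREG_U32_ZX_U128). As a C
 * model:
 *
 *     puDst->au32[0] = puSrc->au32[0];                       // reg,reg: upper bits kept
 *     // vs.
 *     puDst->au32[0] = uMem32;
 *     puDst->au32[1] = puDst->au32[2] = puDst->au32[3] = 0;  // reg,mem: zero-extended
 */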
1091
1092
1093/**
1094 * @opcode 0x10
1095 * @oppfx 0xf2
1096 * @opcpuid sse2
1097 * @opgroup og_sse2_pcksclr_datamove
1098 * @opxcpttype 5
1099 * @optest op1=1 op2=2 -> op1=2
1100 * @optest op1=0 op2=-42 -> op1=-42
1101 */
1102FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1103{
1104 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1107 {
1108 /*
1109 * Register, register.
1110 */
1111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1112 IEM_MC_BEGIN(0, 1);
1113 IEM_MC_LOCAL(uint64_t, uSrc);
1114
1115 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1117 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1118 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1119
1120 IEM_MC_ADVANCE_RIP();
1121 IEM_MC_END();
1122 }
1123 else
1124 {
1125 /*
1126 * Memory, register.
1127 */
1128 IEM_MC_BEGIN(0, 2);
1129 IEM_MC_LOCAL(uint64_t, uSrc);
1130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1131
1132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1136
1137 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1138 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1139
1140 IEM_MC_ADVANCE_RIP();
1141 IEM_MC_END();
1142 }
1143 return VINF_SUCCESS;
1144}
1145
1146
1147/**
1148 * @opcode 0x11
1149 * @oppfx none
1150 * @opcpuid sse
1151 * @opgroup og_sse_simdfp_datamove
1152 * @opxcpttype 4UA
1153 * @optest op1=1 op2=2 -> op1=2
1154 * @optest op1=0 op2=-42 -> op1=-42
1155 */
1156FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1157{
1158 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1161 {
1162 /*
1163 * Register, register.
1164 */
1165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1166 IEM_MC_BEGIN(0, 0);
1167 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1169 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1170 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_ADVANCE_RIP();
1172 IEM_MC_END();
1173 }
1174 else
1175 {
1176 /*
1177 * Memory, register.
1178 */
1179 IEM_MC_BEGIN(0, 2);
1180 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1182
1183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1185 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1186 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1187
1188 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1189 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1190
1191 IEM_MC_ADVANCE_RIP();
1192 IEM_MC_END();
1193 }
1194 return VINF_SUCCESS;
1195}
1196
1197
1198/**
1199 * @opcode 0x11
1200 * @oppfx 0x66
1201 * @opcpuid sse2
1202 * @opgroup og_sse2_pcksclr_datamove
1203 * @opxcpttype 4UA
1204 * @optest op1=1 op2=2 -> op1=2
1205 * @optest op1=0 op2=-42 -> op1=-42
1206 */
1207FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1208{
1209 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1212 {
1213 /*
1214 * Register, register.
1215 */
1216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1217 IEM_MC_BEGIN(0, 0);
1218 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1219 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1220 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1221 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1222 IEM_MC_ADVANCE_RIP();
1223 IEM_MC_END();
1224 }
1225 else
1226 {
1227 /*
1228 * Memory, register.
1229 */
1230 IEM_MC_BEGIN(0, 2);
1231 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1233
1234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1236 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1238
1239 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1240 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1241
1242 IEM_MC_ADVANCE_RIP();
1243 IEM_MC_END();
1244 }
1245 return VINF_SUCCESS;
1246}
1247
1248
1249/**
1250 * @opcode 0x11
1251 * @oppfx 0xf3
1252 * @opcpuid sse
1253 * @opgroup og_sse_simdfp_datamove
1254 * @opxcpttype 5
1255 * @optest op1=1 op2=2 -> op1=2
1256 * @optest op1=0 op2=-22 -> op1=-22
1257 */
1258FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1259{
1260 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1263 {
1264 /*
1265 * Register, register.
1266 */
1267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1268 IEM_MC_BEGIN(0, 1);
1269 IEM_MC_LOCAL(uint32_t, uSrc);
1270
1271 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1272 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1273 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1274 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1275
1276 IEM_MC_ADVANCE_RIP();
1277 IEM_MC_END();
1278 }
1279 else
1280 {
1281 /*
1282 * Memory, register.
1283 */
1284 IEM_MC_BEGIN(0, 2);
1285 IEM_MC_LOCAL(uint32_t, uSrc);
1286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1287
1288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1290 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1292
1293 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1294 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1295
1296 IEM_MC_ADVANCE_RIP();
1297 IEM_MC_END();
1298 }
1299 return VINF_SUCCESS;
1300}
1301
1302
1303/**
1304 * @opcode 0x11
1305 * @oppfx 0xf2
1306 * @opcpuid sse2
1307 * @opgroup og_sse2_pcksclr_datamove
1308 * @opxcpttype 5
1309 * @optest op1=1 op2=2 -> op1=2
1310 * @optest op1=0 op2=-42 -> op1=-42
1311 */
1312FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1313{
1314 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1317 {
1318 /*
1319 * Register, register.
1320 */
1321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1322 IEM_MC_BEGIN(0, 1);
1323 IEM_MC_LOCAL(uint64_t, uSrc);
1324
1325 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1326 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1327 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1328 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1329
1330 IEM_MC_ADVANCE_RIP();
1331 IEM_MC_END();
1332 }
1333 else
1334 {
1335 /*
1336 * Memory, register.
1337 */
1338 IEM_MC_BEGIN(0, 2);
1339 IEM_MC_LOCAL(uint64_t, uSrc);
1340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1341
1342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1344 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1346
1347 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1348 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1349
1350 IEM_MC_ADVANCE_RIP();
1351 IEM_MC_END();
1352 }
1353 return VINF_SUCCESS;
1354}
1355
1356
1357FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1358{
1359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1361 {
1362 /**
1363 * @opcode 0x12
1364 * @opcodesub 11 mr/reg
1365 * @oppfx none
1366 * @opcpuid sse
1367 * @opgroup og_sse_simdfp_datamove
1368 * @opxcpttype 5
1369 * @optest op1=1 op2=2 -> op1=2
1370 * @optest op1=0 op2=-42 -> op1=-42
1371 */
1372 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1373
1374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1375 IEM_MC_BEGIN(0, 1);
1376 IEM_MC_LOCAL(uint64_t, uSrc);
1377
1378 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1380 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1381 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1382
1383 IEM_MC_ADVANCE_RIP();
1384 IEM_MC_END();
1385 }
1386 else
1387 {
1388 /**
1389 * @opdone
1390 * @opcode 0x12
1391 * @opcodesub !11 mr/reg
1392 * @oppfx none
1393 * @opcpuid sse
1394 * @opgroup og_sse_simdfp_datamove
1395 * @opxcpttype 5
1396 * @optest op1=1 op2=2 -> op1=2
1397 * @optest op1=0 op2=-42 -> op1=-42
1398 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1399 */
1400 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1401
1402 IEM_MC_BEGIN(0, 2);
1403 IEM_MC_LOCAL(uint64_t, uSrc);
1404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1405
1406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1408 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1409 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1410
1411 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1412 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1413
1414 IEM_MC_ADVANCE_RIP();
1415 IEM_MC_END();
1416 }
1417 return VINF_SUCCESS;
1418}
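/*
 * Semantics sketch (editor's addition): the register form (MOVHLPS) copies
 * the *high* qword of the source into the *low* qword of the destination,
 * the memory form (MOVLPS) loads a qword into the low half; bits 127:64 of
 * the destination stay untouched either way:
 *
 *     puDst->au64[0] = fRegForm ? puSrc->au64[1] : uMem64;  // low qword only
 */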
1419
1420
1421/**
1422 * @opcode 0x12
1423 * @opcodesub !11 mr/reg
1424 * @oppfx 0x66
1425 * @opcpuid sse2
1426 * @opgroup og_sse2_pcksclr_datamove
1427 * @opxcpttype 5
1428 * @optest op1=1 op2=2 -> op1=2
1429 * @optest op1=0 op2=-42 -> op1=-42
1430 */
1431FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1432{
1433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1434 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1435 {
1436 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1437
1438 IEM_MC_BEGIN(0, 2);
1439 IEM_MC_LOCAL(uint64_t, uSrc);
1440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1441
1442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1446
1447 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1448 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1449
1450 IEM_MC_ADVANCE_RIP();
1451 IEM_MC_END();
1452 return VINF_SUCCESS;
1453 }
1454
1455 /**
1456 * @opdone
1457 * @opmnemonic ud660f12m3
1458 * @opcode 0x12
1459 * @opcodesub 11 mr/reg
1460 * @oppfx 0x66
1461 * @opunused immediate
1462 * @opcpuid sse
1463 * @optest ->
1464 */
1465 return IEMOP_RAISE_INVALID_OPCODE();
1466}
1467
1468
1469/**
1470 * @opcode 0x12
1471 * @oppfx 0xf3
1472 * @opcpuid sse3
1473 * @opgroup og_sse3_pcksclr_datamove
1474 * @opxcpttype 4
1475 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1476 * op1=0x00000002000000020000000100000001
1477 */
1478FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1479{
1480 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1483 {
1484 /*
1485 * Register, register.
1486 */
1487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1488 IEM_MC_BEGIN(2, 0);
1489 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1490 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1491
1492 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1493 IEM_MC_PREPARE_SSE_USAGE();
1494
1495 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1496 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1497 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1498
1499 IEM_MC_ADVANCE_RIP();
1500 IEM_MC_END();
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 IEM_MC_BEGIN(2, 2);
1508 IEM_MC_LOCAL(RTUINT128U, uSrc);
1509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1510 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1511 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1515 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1516 IEM_MC_PREPARE_SSE_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1520 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1521
1522 IEM_MC_ADVANCE_RIP();
1523 IEM_MC_END();
1524 }
1525 return VINF_SUCCESS;
1526}
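/*
 * C model of iemAImpl_movsldup, derived from the @optest vector above
 * (MOVSHDUP further down is the odd-dword counterpart):
 *
 *     puDst->au32[0] = puDst->au32[1] = puSrc->au32[0];  // duplicate dword 0
 *     puDst->au32[2] = puDst->au32[3] = puSrc->au32[2];  // duplicate dword 2
 */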
1527
1528
1529/**
1530 * @opcode 0x12
1531 * @oppfx 0xf2
1532 * @opcpuid sse3
1533 * @opgroup og_sse3_pcksclr_datamove
1534 * @opxcpttype 5
1535 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1536 * op1=0x22222222111111112222222211111111
1537 */
1538FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1539{
1540 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1543 {
1544 /*
1545 * Register, register.
1546 */
1547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1548 IEM_MC_BEGIN(2, 0);
1549 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1550 IEM_MC_ARG(uint64_t, uSrc, 1);
1551
1552 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1553 IEM_MC_PREPARE_SSE_USAGE();
1554
1555 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1556 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1557 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1558
1559 IEM_MC_ADVANCE_RIP();
1560 IEM_MC_END();
1561 }
1562 else
1563 {
1564 /*
1565 * Register, memory.
1566 */
1567 IEM_MC_BEGIN(2, 2);
1568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1569 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1570 IEM_MC_ARG(uint64_t, uSrc, 1);
1571
1572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1574 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1575 IEM_MC_PREPARE_SSE_USAGE();
1576
1577 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1578 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1579 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP();
1582 IEM_MC_END();
1583 }
1584 return VINF_SUCCESS;
1585}
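/*
 * C model of iemAImpl_movddup, matching the @optest vector above: the low
 * qword of the source is duplicated into both halves of the destination:
 *
 *     puDst->au64[0] = puDst->au64[1] = uSrc;  // uSrc = low qword of source
 */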
1586
1587
1588/**
1589 * @opcode 0x13
1590 * @opcodesub !11 mr/reg
1591 * @oppfx none
1592 * @opcpuid sse
1593 * @opgroup og_sse_simdfp_datamove
1594 * @opxcpttype 5
1595 * @optest op1=1 op2=2 -> op1=2
1596 * @optest op1=0 op2=-42 -> op1=-42
1597 */
1598FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1599{
1600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1601 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1602 {
1603 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1604
1605 IEM_MC_BEGIN(0, 2);
1606 IEM_MC_LOCAL(uint64_t, uSrc);
1607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1608
1609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1611 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1612 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1613
1614 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1615 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1616
1617 IEM_MC_ADVANCE_RIP();
1618 IEM_MC_END();
1619 return VINF_SUCCESS;
1620 }
1621
1622 /**
1623 * @opdone
1624 * @opmnemonic ud0f13m3
1625 * @opcode 0x13
1626 * @opcodesub 11 mr/reg
1627 * @oppfx none
1628 * @opunused immediate
1629 * @opcpuid sse
1630 * @optest ->
1631 */
1632 return IEMOP_RAISE_INVALID_OPCODE();
1633}
1634
1635
1636/**
1637 * @opcode 0x13
1638 * @opcodesub !11 mr/reg
1639 * @oppfx 0x66
1640 * @opcpuid sse2
1641 * @opgroup og_sse2_pcksclr_datamove
1642 * @opxcpttype 5
1643 * @optest op1=1 op2=2 -> op1=2
1644 * @optest op1=0 op2=-42 -> op1=-42
1645 */
1646FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1647{
1648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1649 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1650 {
1651 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1652 IEM_MC_BEGIN(0, 2);
1653 IEM_MC_LOCAL(uint64_t, uSrc);
1654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1655
1656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1658 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1659 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1660
1661 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1662 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1663
1664 IEM_MC_ADVANCE_RIP();
1665 IEM_MC_END();
1666 return VINF_SUCCESS;
1667 }
1668
1669 /**
1670 * @opdone
1671 * @opmnemonic ud660f13m3
1672 * @opcode 0x13
1673 * @opcodesub 11 mr/reg
1674 * @oppfx 0x66
1675 * @opunused immediate
1676 * @opcpuid sse
1677 * @optest ->
1678 */
1679 return IEMOP_RAISE_INVALID_OPCODE();
1680}
1681
1682
1683/**
1684 * @opmnemonic udf30f13
1685 * @opcode 0x13
1686 * @oppfx 0xf3
1687 * @opunused intel-modrm
1688 * @opcpuid sse
1689 * @optest ->
1690 * @opdone
1691 */
1692
1693/**
1694 * @opmnemonic udf20f13
1695 * @opcode 0x13
1696 * @oppfx 0xf2
1697 * @opunused intel-modrm
1698 * @opcpuid sse
1699 * @optest ->
1700 * @opdone
1701 */
1702
1703/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1704FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1705/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1706FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1707
1708/**
1709 * @opdone
1710 * @opmnemonic udf30f14
1711 * @opcode 0x14
1712 * @oppfx 0xf3
1713 * @opunused intel-modrm
1714 * @opcpuid sse
1715 * @optest ->
1716 * @opdone
1717 */
1718
1719/**
1720 * @opmnemonic udf20f14
1721 * @opcode 0x14
1722 * @oppfx 0xf2
1723 * @opunused intel-modrm
1724 * @opcpuid sse
1725 * @optest ->
1726 * @opdone
1727 */
1728
1729/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1730FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1731/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1732FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1733/* Opcode 0xf3 0x0f 0x15 - invalid */
1734/* Opcode 0xf2 0x0f 0x15 - invalid */
1735
1736/**
1737 * @opdone
1738 * @opmnemonic udf30f15
1739 * @opcode 0x15
1740 * @oppfx 0xf3
1741 * @opunused intel-modrm
1742 * @opcpuid sse
1743 * @optest ->
1744 * @opdone
1745 */
1746
1747/**
1748 * @opmnemonic udf20f15
1749 * @opcode 0x15
1750 * @oppfx 0xf2
1751 * @opunused intel-modrm
1752 * @opcpuid sse
1753 * @optest ->
1754 * @opdone
1755 */
1756
1757FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1758{
1759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1760 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1761 {
1762 /**
1763 * @opcode 0x16
1764 * @opcodesub 11 mr/reg
1765 * @oppfx none
1766 * @opcpuid sse
1767 * @opgroup og_sse_simdfp_datamove
1768 * @opxcpttype 5
1769 * @optest op1=1 op2=2 -> op1=2
1770 * @optest op1=0 op2=-42 -> op1=-42
1771 */
1772 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1773
1774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1775 IEM_MC_BEGIN(0, 1);
1776 IEM_MC_LOCAL(uint64_t, uSrc);
1777
1778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1780 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1781 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1782
1783 IEM_MC_ADVANCE_RIP();
1784 IEM_MC_END();
1785 }
1786 else
1787 {
1788 /**
1789 * @opdone
1790 * @opcode 0x16
1791 * @opcodesub !11 mr/reg
1792 * @oppfx none
1793 * @opcpuid sse
1794 * @opgroup og_sse_simdfp_datamove
1795 * @opxcpttype 5
1796 * @optest op1=1 op2=2 -> op1=2
1797 * @optest op1=0 op2=-42 -> op1=-42
1798 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1799 */
1800 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1801
1802 IEM_MC_BEGIN(0, 2);
1803 IEM_MC_LOCAL(uint64_t, uSrc);
1804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1805
1806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1808 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1809 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1810
1811 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1812 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1813
1814 IEM_MC_ADVANCE_RIP();
1815 IEM_MC_END();
1816 }
1817 return VINF_SUCCESS;
1818}
1819
1820
1821/**
1822 * @opcode 0x16
1823 * @opcodesub !11 mr/reg
1824 * @oppfx 0x66
1825 * @opcpuid sse2
1826 * @opgroup og_sse2_pcksclr_datamove
1827 * @opxcpttype 5
1828 * @optest op1=1 op2=2 -> op1=2
1829 * @optest op1=0 op2=-42 -> op1=-42
1830 */
1831FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1832{
1833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1834 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1835 {
1836 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1837 IEM_MC_BEGIN(0, 2);
1838 IEM_MC_LOCAL(uint64_t, uSrc);
1839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1840
1841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1843 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1844 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1845
1846 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1847 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1848
1849 IEM_MC_ADVANCE_RIP();
1850 IEM_MC_END();
1851 return VINF_SUCCESS;
1852 }
1853
1854 /**
1855 * @opdone
1856 * @opmnemonic ud660f16m3
1857 * @opcode 0x16
1858 * @opcodesub 11 mr/reg
1859 * @oppfx 0x66
1860 * @opunused immediate
1861 * @opcpuid sse
1862 * @optest ->
1863 */
1864 return IEMOP_RAISE_INVALID_OPCODE();
1865}
1866
1867
1868/**
1869 * @opcode 0x16
1870 * @oppfx 0xf3
1871 * @opcpuid sse3
1872 * @opgroup og_sse3_pcksclr_datamove
1873 * @opxcpttype 4
1874 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1875 * op1=0x00000002000000020000000100000001
1876 */
1877FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1878{
1879 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1882 {
1883 /*
1884 * Register, register.
1885 */
1886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1887 IEM_MC_BEGIN(2, 0);
1888 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1889 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1890
1891 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1892 IEM_MC_PREPARE_SSE_USAGE();
1893
1894 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1895 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1896 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1897
1898 IEM_MC_ADVANCE_RIP();
1899 IEM_MC_END();
1900 }
1901 else
1902 {
1903 /*
1904 * Register, memory.
1905 */
1906 IEM_MC_BEGIN(2, 2);
1907 IEM_MC_LOCAL(RTUINT128U, uSrc);
1908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1909 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1910 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1911
1912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1914 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1915 IEM_MC_PREPARE_SSE_USAGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1919 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1920
1921 IEM_MC_ADVANCE_RIP();
1922 IEM_MC_END();
1923 }
1924 return VINF_SUCCESS;
1925}
1926
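/*
 * MOVSHDUP duplicates the two odd-indexed source dwords downwards, as the
 * @optest values above show.  Host-side sketch (illustrative only; names
 * are made up):
 *
 *      #include <stdint.h>
 *
 *      static void sketchMovShDup(uint32_t aDst[4], uint32_t const aSrc[4])
 *      {
 *          aDst[0] = aSrc[1];
 *          aDst[1] = aSrc[1];
 *          aDst[2] = aSrc[3];
 *          aDst[3] = aSrc[3];
 *      }
 */
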
1927/**
1928 * @opdone
1929 * @opmnemonic udf30f16
1930 * @opcode 0x16
1931 * @oppfx 0xf2
1932 * @opunused intel-modrm
1933 * @opcpuid sse
1934 * @optest ->
1935 * @opdone
1936 */
1937
1938
1939/**
1940 * @opcode 0x17
1941 * @opcodesub !11 mr/reg
1942 * @oppfx none
1943 * @opcpuid sse
1944 * @opgroup og_sse_simdfp_datamove
1945 * @opxcpttype 5
1946 * @optest op1=1 op2=2 -> op1=2
1947 * @optest op1=0 op2=-42 -> op1=-42
1948 */
1949FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1950{
1951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1952 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1953 {
1954 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1955
1956 IEM_MC_BEGIN(0, 2);
1957 IEM_MC_LOCAL(uint64_t, uSrc);
1958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1959
1960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1962 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1963 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1964
1965 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1966 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1967
1968 IEM_MC_ADVANCE_RIP();
1969 IEM_MC_END();
1970 return VINF_SUCCESS;
1971 }
1972
1973 /**
1974 * @opdone
1975 * @opmnemonic ud0f17m3
1976 * @opcode 0x17
1977 * @opcodesub 11 mr/reg
1978 * @oppfx none
1979 * @opunused immediate
1980 * @opcpuid sse
1981 * @optest ->
1982 */
1983 return IEMOP_RAISE_INVALID_OPCODE();
1984}
1985
1986
1987/**
1988 * @opcode 0x17
1989 * @opcodesub !11 mr/reg
1990 * @oppfx 0x66
1991 * @opcpuid sse2
1992 * @opgroup og_sse2_pcksclr_datamove
1993 * @opxcpttype 5
1994 * @optest op1=1 op2=2 -> op1=2
1995 * @optest op1=0 op2=-42 -> op1=-42
1996 */
1997FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1998{
1999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2000 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2001 {
2002 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2003
2004 IEM_MC_BEGIN(0, 2);
2005 IEM_MC_LOCAL(uint64_t, uSrc);
2006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2007
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2011 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2012
2013 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2014 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2015
2016 IEM_MC_ADVANCE_RIP();
2017 IEM_MC_END();
2018 return VINF_SUCCESS;
2019 }
2020
2021 /**
2022 * @opdone
2023 * @opmnemonic ud660f17m3
2024 * @opcode 0x17
2025 * @opcodesub 11 mr/reg
2026 * @oppfx 0x66
2027 * @opunused immediate
2028 * @opcpuid sse
2029 * @optest ->
2030 */
2031 return IEMOP_RAISE_INVALID_OPCODE();
2032}
2033
2034
2035/**
2036 * @opdone
2037 * @opmnemonic udf30f17
2038 * @opcode 0x17
2039 * @oppfx 0xf3
2040 * @opunused intel-modrm
2041 * @opcpuid sse
2042 * @optest ->
2043 * @opdone
2044 */
2045
2046/**
2047 * @opmnemonic udf20f17
2048 * @opcode 0x17
2049 * @oppfx 0xf2
2050 * @opunused intel-modrm
2051 * @opcpuid sse
2052 * @optest ->
2053 * @opdone
2054 */
2055
2056
2057/** Opcode 0x0f 0x18. */
2058FNIEMOP_DEF(iemOp_prefetch_Grp16)
2059{
2060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2061 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2062 {
2063 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2064 {
2065 case 4: /* Aliased to /0 for the time being according to AMD. */
2066 case 5: /* Aliased to /0 for the time being according to AMD. */
2067 case 6: /* Aliased to /0 for the time being according to AMD. */
2068 case 7: /* Aliased to /0 for the time being according to AMD. */
2069 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2070 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2071 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2072 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2074 }
2075
2076 IEM_MC_BEGIN(0, 1);
2077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2080 /* Currently a NOP. */
2081 NOREF(GCPtrEffSrc);
2082 IEM_MC_ADVANCE_RIP();
2083 IEM_MC_END();
2084 return VINF_SUCCESS;
2085 }
2086
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/** Opcode 0x0f 0x19..0x1f. */
2092FNIEMOP_DEF(iemOp_nop_Ev)
2093{
2094 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2096 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2097 {
2098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2099 IEM_MC_BEGIN(0, 0);
2100 IEM_MC_ADVANCE_RIP();
2101 IEM_MC_END();
2102 }
2103 else
2104 {
2105 IEM_MC_BEGIN(0, 1);
2106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2109 /* Currently a NOP. */
2110 NOREF(GCPtrEffSrc);
2111 IEM_MC_ADVANCE_RIP();
2112 IEM_MC_END();
2113 }
2114 return VINF_SUCCESS;
2115}
2116
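/*
 * The 0x0f 0x19..0x1f group decodes a full ModR/M operand so that the
 * instruction length and decoding behavior match hardware, but performs no
 * memory access; 0x0f 0x1f /0 is the documented multi-byte NOP encoding.
 */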
2117
2118/** Opcode 0x0f 0x20. */
2119FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2120{
    /* mod is ignored, as are operand size overrides. */
2122 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2123 IEMOP_HLP_MIN_386();
2124 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2125 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2126 else
2127 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2128
2129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2130 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2131 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2132 {
2133 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2134 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2135 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2136 iCrReg |= 8;
2137 }
2138 switch (iCrReg)
2139 {
2140 case 0: case 2: case 3: case 4: case 8:
2141 break;
2142 default:
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144 }
2145 IEMOP_HLP_DONE_DECODING();
2146
2147 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2148}
2149
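/*
 * Example of the LOCK-prefix CR8 alias handled above (illustrative only):
 * on CPUs with the fMovCr8In32Bit feature, F0 0F 20 C0 ("lock mov eax,
 * cr0") is decoded as "mov eax, cr8", since the LOCK prefix sets bit 3 of
 * the control register index; without the feature the LOCK prefix raises
 * #UD instead.
 */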
2150
2151/** Opcode 0x0f 0x21. */
2152FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2153{
2154 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2155 IEMOP_HLP_MIN_386();
2156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2158 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2159 return IEMOP_RAISE_INVALID_OPCODE();
2160 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2161 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2162 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2163}
2164
2165
2166/** Opcode 0x0f 0x22. */
2167FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2168{
    /* mod is ignored, as are operand size overrides. */
2170 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2171 IEMOP_HLP_MIN_386();
2172 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2173 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2174 else
2175 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2176
2177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2178 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2179 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2180 {
2181 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2182 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2183 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2184 iCrReg |= 8;
2185 }
2186 switch (iCrReg)
2187 {
2188 case 0: case 2: case 3: case 4: case 8:
2189 break;
2190 default:
2191 return IEMOP_RAISE_INVALID_OPCODE();
2192 }
2193 IEMOP_HLP_DONE_DECODING();
2194
2195 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2196}
2197
2198
2199/** Opcode 0x0f 0x23. */
2200FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2201{
2202 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2203 IEMOP_HLP_MIN_386();
2204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2206 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2207 return IEMOP_RAISE_INVALID_OPCODE();
2208 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2209 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2210 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2211}
2212
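/*
 * For both debug register moves above, REX.R is rejected with #UD: there
 * are no DR8..DR15 to encode.  As with the control register moves, the mod
 * field is effectively ignored and the operand is always a register.
 */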
2213
2214/** Opcode 0x0f 0x24. */
2215FNIEMOP_DEF(iemOp_mov_Rd_Td)
2216{
2217 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2218 IEMOP_HLP_MIN_386();
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2221 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2222 return IEMOP_RAISE_INVALID_OPCODE();
2223 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2224 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2225 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2226}
2227
2228
2229/** Opcode 0x0f 0x26. */
2230FNIEMOP_DEF(iemOp_mov_Td_Rd)
2231{
2232 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2233 IEMOP_HLP_MIN_386();
2234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2237 return IEMOP_RAISE_INVALID_OPCODE();
2238 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2239 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2240 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2241}
2242
2243
2244/**
2245 * @opcode 0x28
2246 * @oppfx none
2247 * @opcpuid sse
2248 * @opgroup og_sse_simdfp_datamove
2249 * @opxcpttype 1
2250 * @optest op1=1 op2=2 -> op1=2
2251 * @optest op1=0 op2=-42 -> op1=-42
2252 */
2253FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2254{
2255 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2258 {
2259 /*
2260 * Register, register.
2261 */
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 IEM_MC_BEGIN(0, 0);
2264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2266 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2267 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 else
2272 {
2273 /*
2274 * Register, memory.
2275 */
2276 IEM_MC_BEGIN(0, 2);
2277 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2279
2280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2284
2285 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2286 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2287
2288 IEM_MC_ADVANCE_RIP();
2289 IEM_MC_END();
2290 }
2291 return VINF_SUCCESS;
2292}
2293
2294/**
2295 * @opcode 0x28
2296 * @oppfx 66
2297 * @opcpuid sse2
2298 * @opgroup og_sse2_pcksclr_datamove
2299 * @opxcpttype 1
2300 * @optest op1=1 op2=2 -> op1=2
2301 * @optest op1=0 op2=-42 -> op1=-42
2302 */
2303FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2304{
2305 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2307 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2308 {
2309 /*
2310 * Register, register.
2311 */
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_BEGIN(0, 0);
2314 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2315 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2316 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2317 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2318 IEM_MC_ADVANCE_RIP();
2319 IEM_MC_END();
2320 }
2321 else
2322 {
2323 /*
2324 * Register, memory.
2325 */
2326 IEM_MC_BEGIN(0, 2);
2327 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2329
2330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2334
2335 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2336 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2337
2338 IEM_MC_ADVANCE_RIP();
2339 IEM_MC_END();
2340 }
2341 return VINF_SUCCESS;
2342}
2343
2344/* Opcode 0xf3 0x0f 0x28 - invalid */
2345/* Opcode 0xf2 0x0f 0x28 - invalid */
2346
2347/**
2348 * @opcode 0x29
2349 * @oppfx none
2350 * @opcpuid sse
2351 * @opgroup og_sse_simdfp_datamove
2352 * @opxcpttype 1
2353 * @optest op1=1 op2=2 -> op1=2
2354 * @optest op1=0 op2=-42 -> op1=-42
2355 */
2356FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2357{
2358 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2361 {
2362 /*
2363 * Register, register.
2364 */
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_BEGIN(0, 0);
2367 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2369 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2370 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2371 IEM_MC_ADVANCE_RIP();
2372 IEM_MC_END();
2373 }
2374 else
2375 {
2376 /*
2377 * Memory, register.
2378 */
2379 IEM_MC_BEGIN(0, 2);
2380 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2387
2388 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2389 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2390
2391 IEM_MC_ADVANCE_RIP();
2392 IEM_MC_END();
2393 }
2394 return VINF_SUCCESS;
2395}
2396
2397/**
2398 * @opcode 0x29
2399 * @oppfx 66
2400 * @opcpuid sse2
2401 * @opgroup og_sse2_pcksclr_datamove
2402 * @opxcpttype 1
2403 * @optest op1=1 op2=2 -> op1=2
2404 * @optest op1=0 op2=-42 -> op1=-42
2405 */
2406FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2407{
2408 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2411 {
2412 /*
2413 * Register, register.
2414 */
2415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2416 IEM_MC_BEGIN(0, 0);
2417 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2418 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2419 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2420 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2421 IEM_MC_ADVANCE_RIP();
2422 IEM_MC_END();
2423 }
2424 else
2425 {
2426 /*
2427 * Memory, register.
2428 */
2429 IEM_MC_BEGIN(0, 2);
2430 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2432
2433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2435 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2437
2438 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2439 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2440
2441 IEM_MC_ADVANCE_RIP();
2442 IEM_MC_END();
2443 }
2444 return VINF_SUCCESS;
2445}
2446
2447/* Opcode 0xf3 0x0f 0x29 - invalid */
2448/* Opcode 0xf2 0x0f 0x29 - invalid */
2449
2450
2451/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2452FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2453/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2454FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2459
2460
2461/**
2462 * @opcode 0x2b
2463 * @opcodesub !11 mr/reg
2464 * @oppfx none
2465 * @opcpuid sse
2466 * @opgroup og_sse1_cachect
2467 * @opxcpttype 1
2468 * @optest op1=1 op2=2 -> op1=2
2469 * @optest op1=0 op2=-42 -> op1=-42
2470 */
2471FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2472{
2473 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2475 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2476 {
2477 /*
2478 * memory, register.
2479 */
2480 IEM_MC_BEGIN(0, 2);
2481 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2483
2484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2486 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2488
2489 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2490 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP();
2493 IEM_MC_END();
2494 }
2495 /* The register, register encoding is invalid. */
2496 else
2497 return IEMOP_RAISE_INVALID_OPCODE();
2498 return VINF_SUCCESS;
2499}
2500
2501/**
2502 * @opcode 0x2b
2503 * @opcodesub !11 mr/reg
2504 * @oppfx 0x66
2505 * @opcpuid sse2
2506 * @opgroup og_sse2_cachect
2507 * @opxcpttype 1
2508 * @optest op1=1 op2=2 -> op1=2
2509 * @optest op1=0 op2=-42 -> op1=-42
2510 */
2511FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2512{
2513 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2516 {
2517 /*
2518 * memory, register.
2519 */
2520 IEM_MC_BEGIN(0, 2);
2521 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2523
2524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2528
2529 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2530 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2531
2532 IEM_MC_ADVANCE_RIP();
2533 IEM_MC_END();
2534 }
2535 /* The register, register encoding is invalid. */
2536 else
2537 return IEMOP_RAISE_INVALID_OPCODE();
2538 return VINF_SUCCESS;
2539}
2540/* Opcode 0xf3 0x0f 0x2b - invalid */
2541/* Opcode 0xf2 0x0f 0x2b - invalid */
2542
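/*
 * Both MOVNT* stores above are memory-destination only; the register form
 * (ModR/M mod == 3) is #UD.  The non-temporal hint itself is not modelled:
 * the emulation performs a plain aligned 128-bit store.  Mod-field check
 * sketch (illustrative only; name is made up):
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static bool sketchIsRegisterForm(uint8_t bRm)
 *      {
 *          return (bRm & 0xc0) == 0xc0; // X86_MODRM_MOD_MASK; mod == 3
 *      }
 */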
2543
2544/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2545FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2546/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2547FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2548/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2549FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2550/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2551FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2552
2553/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2554FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2555/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2556FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2557/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2558FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2559/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2560FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2561
2562/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2563FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2564/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2565FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2566/* Opcode 0xf3 0x0f 0x2e - invalid */
2567/* Opcode 0xf2 0x0f 0x2e - invalid */
2568
2569/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2570FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2571/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2572FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2573/* Opcode 0xf3 0x0f 0x2f - invalid */
2574/* Opcode 0xf2 0x0f 0x2f - invalid */
2575
2576/** Opcode 0x0f 0x30. */
2577FNIEMOP_DEF(iemOp_wrmsr)
2578{
2579 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2581 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2582}
2583
2584
2585/** Opcode 0x0f 0x31. */
2586FNIEMOP_DEF(iemOp_rdtsc)
2587{
2588 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2590 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2591}
2592
2593
/** Opcode 0x0f 0x32. */
2595FNIEMOP_DEF(iemOp_rdmsr)
2596{
2597 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2599 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2600}
2601
2602
/** Opcode 0x0f 0x33. */
2604FNIEMOP_DEF(iemOp_rdpmc)
2605{
2606 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2608 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2609}
2610
2611
2612/** Opcode 0x0f 0x34. */
2613FNIEMOP_STUB(iemOp_sysenter);
2614/** Opcode 0x0f 0x35. */
2615FNIEMOP_STUB(iemOp_sysexit);
2616/** Opcode 0x0f 0x37. */
2617FNIEMOP_STUB(iemOp_getsec);
2618
2619
2620/** Opcode 0x0f 0x38. */
2621FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2622{
2623#ifdef IEM_WITH_THREE_0F_38
2624 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2625 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2626#else
2627 IEMOP_BITCH_ABOUT_STUB();
2628 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2629#endif
2630}
2631
2632
2633/** Opcode 0x0f 0x3a. */
2634FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2635{
2636#ifdef IEM_WITH_THREE_0F_3A
2637 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2638 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2639#else
2640 IEMOP_BITCH_ABOUT_STUB();
2641 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2642#endif
2643}
2644
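/*
 * Both escape handlers above index four table entries per opcode byte, one
 * per mandatory-prefix state in pVCpu->iem.s.idxPrefix (assumed ordering:
 * none, 0x66, 0xf3, 0xf2), hence the "b * 4 + idxPrefix" expression.
 * Indexing sketch (illustrative only; name is made up):
 *
 *      #include <stddef.h>
 *      #include <stdint.h>
 *
 *      static size_t sketchCalcTableIndex(uint8_t bOpcode, unsigned idxPrefix)
 *      {
 *          return (size_t)bOpcode * 4 + idxPrefix; // idxPrefix in [0..3]
 *      }
 */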
2645
2646/**
2647 * Implements a conditional move.
2648 *
2649 * Wish there was an obvious way to do this where we could share and reduce
2650 * code bloat.
2651 *
2652 * @param a_Cnd The conditional "microcode" operation.
2653 */
2654#define CMOV_X(a_Cnd) \
2655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2657 { \
2658 switch (pVCpu->iem.s.enmEffOpSize) \
2659 { \
2660 case IEMMODE_16BIT: \
2661 IEM_MC_BEGIN(0, 1); \
2662 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2663 a_Cnd { \
2664 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2665 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2666 } IEM_MC_ENDIF(); \
2667 IEM_MC_ADVANCE_RIP(); \
2668 IEM_MC_END(); \
2669 return VINF_SUCCESS; \
2670 \
2671 case IEMMODE_32BIT: \
2672 IEM_MC_BEGIN(0, 1); \
2673 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2674 a_Cnd { \
2675 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2676 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2677 } IEM_MC_ELSE() { \
2678 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2679 } IEM_MC_ENDIF(); \
2680 IEM_MC_ADVANCE_RIP(); \
2681 IEM_MC_END(); \
2682 return VINF_SUCCESS; \
2683 \
2684 case IEMMODE_64BIT: \
2685 IEM_MC_BEGIN(0, 1); \
2686 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2687 a_Cnd { \
2688 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2689 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2690 } IEM_MC_ENDIF(); \
2691 IEM_MC_ADVANCE_RIP(); \
2692 IEM_MC_END(); \
2693 return VINF_SUCCESS; \
2694 \
2695 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2696 } \
2697 } \
2698 else \
2699 { \
2700 switch (pVCpu->iem.s.enmEffOpSize) \
2701 { \
2702 case IEMMODE_16BIT: \
2703 IEM_MC_BEGIN(0, 2); \
2704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2705 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2707 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2708 a_Cnd { \
2709 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2710 } IEM_MC_ENDIF(); \
2711 IEM_MC_ADVANCE_RIP(); \
2712 IEM_MC_END(); \
2713 return VINF_SUCCESS; \
2714 \
2715 case IEMMODE_32BIT: \
2716 IEM_MC_BEGIN(0, 2); \
2717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2718 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2720 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2721 a_Cnd { \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 2); \
2732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2733 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2735 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2736 a_Cnd { \
2737 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2738 } IEM_MC_ENDIF(); \
2739 IEM_MC_ADVANCE_RIP(); \
2740 IEM_MC_END(); \
2741 return VINF_SUCCESS; \
2742 \
2743 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2744 } \
2745 } do {} while (0)
2746
2747
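/*
 * Note the asymmetry in the 32-bit CMOV_X cases above: in 64-bit mode a
 * 32-bit CMOV clears the high half of the destination even when the
 * condition is false (IEM_MC_CLEAR_HIGH_GREG_U64).  Semantics sketch
 * (illustrative only; names are made up):
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static uint64_t sketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
 *      {
 *          return fCond ? uSrc               // move and zero-extend
 *                       : uDst & UINT32_MAX; // no move, but still zero the top
 *      }
 */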
2748
2749/** Opcode 0x0f 0x40. */
2750FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2751{
2752 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2753 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2754}
2755
2756
2757/** Opcode 0x0f 0x41. */
2758FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2759{
2760 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2761 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2762}
2763
2764
2765/** Opcode 0x0f 0x42. */
2766FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2767{
2768 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2769 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2770}
2771
2772
2773/** Opcode 0x0f 0x43. */
2774FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2775{
2776 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2777 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2778}
2779
2780
2781/** Opcode 0x0f 0x44. */
2782FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2783{
2784 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2785 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2786}
2787
2788
2789/** Opcode 0x0f 0x45. */
2790FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2791{
2792 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2793 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2794}
2795
2796
2797/** Opcode 0x0f 0x46. */
2798FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2799{
2800 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2801 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2802}
2803
2804
2805/** Opcode 0x0f 0x47. */
2806FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2807{
2808 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2809 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2810}
2811
2812
2813/** Opcode 0x0f 0x48. */
2814FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2815{
2816 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2817 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2818}
2819
2820
2821/** Opcode 0x0f 0x49. */
2822FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2823{
2824 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2825 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2826}
2827
2828
2829/** Opcode 0x0f 0x4a. */
2830FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2831{
2832 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2833 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2834}
2835
2836
2837/** Opcode 0x0f 0x4b. */
2838FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2839{
2840 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2841 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2842}
2843
2844
2845/** Opcode 0x0f 0x4c. */
2846FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2847{
2848 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2849 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2850}
2851
2852
2853/** Opcode 0x0f 0x4d. */
2854FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2855{
2856 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2857 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2858}
2859
2860
2861/** Opcode 0x0f 0x4e. */
2862FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2863{
2864 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2865 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2866}
2867
2868
2869/** Opcode 0x0f 0x4f. */
2870FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2871{
2872 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2873 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2874}
2875
2876#undef CMOV_X
2877
2878/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2879FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2880/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2881FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2882/* Opcode 0xf3 0x0f 0x50 - invalid */
2883/* Opcode 0xf2 0x0f 0x50 - invalid */
2884
2885/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2886FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2887/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2888FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2889/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2890FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2891/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2892FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2893
2894/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2895FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2896/* Opcode 0x66 0x0f 0x52 - invalid */
2897/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2898FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2899/* Opcode 0xf2 0x0f 0x52 - invalid */
2900
2901/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2902FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2903/* Opcode 0x66 0x0f 0x53 - invalid */
2904/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2905FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2906/* Opcode 0xf2 0x0f 0x53 - invalid */
2907
2908/** Opcode 0x0f 0x54 - andps Vps, Wps */
2909FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2910/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2911FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2912/* Opcode 0xf3 0x0f 0x54 - invalid */
2913/* Opcode 0xf2 0x0f 0x54 - invalid */
2914
2915/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2916FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2917/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2918FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2919/* Opcode 0xf3 0x0f 0x55 - invalid */
2920/* Opcode 0xf2 0x0f 0x55 - invalid */
2921
2922/** Opcode 0x0f 0x56 - orps Vps, Wps */
2923FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2924/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2925FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2926/* Opcode 0xf3 0x0f 0x56 - invalid */
2927/* Opcode 0xf2 0x0f 0x56 - invalid */
2928
2929/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2930FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2931/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2932FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2933/* Opcode 0xf3 0x0f 0x57 - invalid */
2934/* Opcode 0xf2 0x0f 0x57 - invalid */
2935
2936/** Opcode 0x0f 0x58 - addps Vps, Wps */
2937FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2938/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2939FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2940/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2941FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2942/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2943FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2944
2945/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2946FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2947/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2948FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2949/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2950FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2951/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2952FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2953
2954/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2955FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2956/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2957FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2958/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2959FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2960/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2961FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2962
2963/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2964FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2965/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2966FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2967/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2968FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2969/* Opcode 0xf2 0x0f 0x5b - invalid */
2970
2971/** Opcode 0x0f 0x5c - subps Vps, Wps */
2972FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2973/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2974FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2975/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2976FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2977/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2978FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2979
2980/** Opcode 0x0f 0x5d - minps Vps, Wps */
2981FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2982/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2983FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2984/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2985FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2986/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2987FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2988
2989/** Opcode 0x0f 0x5e - divps Vps, Wps */
2990FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2991/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2992FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2993/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2994FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2995/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2996FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2997
2998/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2999FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3000/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3001FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3002/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3003FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3004/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3005FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3006
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * Only the lower half of the 2nd operand is used; in the memory case this
 * is a 128-bit aligned 64-bit read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3018{
3019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3020 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3021 {
3022 /*
3023 * Register, register.
3024 */
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_BEGIN(2, 0);
3027 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3028 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3029 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3030 IEM_MC_PREPARE_SSE_USAGE();
3031 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3032 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3033 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3034 IEM_MC_ADVANCE_RIP();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /*
3040 * Register, memory.
3041 */
3042 IEM_MC_BEGIN(2, 2);
3043 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3044 IEM_MC_LOCAL(uint64_t, uSrc);
3045 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3047
3048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3050 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3051 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3052
3053 IEM_MC_PREPARE_SSE_USAGE();
3054 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3055 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3056
3057 IEM_MC_ADVANCE_RIP();
3058 IEM_MC_END();
3059 }
3060 return VINF_SUCCESS;
3061}
3062
3063
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * Only the lower half of the 2nd operand is used; in the memory case this
 * is a 32-bit read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3075{
3076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3077 if (!pImpl->pfnU64)
3078 return IEMOP_RAISE_INVALID_OPCODE();
3079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3080 {
3081 /*
3082 * Register, register.
3083 */
3084 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3085 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3087 IEM_MC_BEGIN(2, 0);
3088 IEM_MC_ARG(uint64_t *, pDst, 0);
3089 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3090 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3091 IEM_MC_PREPARE_FPU_USAGE();
3092 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3093 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3094 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3095 IEM_MC_ADVANCE_RIP();
3096 IEM_MC_END();
3097 }
3098 else
3099 {
3100 /*
3101 * Register, memory.
3102 */
3103 IEM_MC_BEGIN(2, 2);
3104 IEM_MC_ARG(uint64_t *, pDst, 0);
3105 IEM_MC_LOCAL(uint32_t, uSrc);
3106 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3108
3109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3111 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3112 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113
3114 IEM_MC_PREPARE_FPU_USAGE();
3115 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3116 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3117
3118 IEM_MC_ADVANCE_RIP();
3119 IEM_MC_END();
3120 }
3121 return VINF_SUCCESS;
3122}
3123
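/*
 * Example of what the low-low workers compute, using punpcklbw on MMX
 * (64-bit destination, 32-bit source) as a host-side sketch (illustrative
 * only; names are made up):
 *
 *      #include <stdint.h>
 *
 *      static void sketchPunpcklbw(uint64_t *puDst, uint32_t uSrc)
 *      {
 *          uint64_t const uDst    = *puDst;
 *          uint64_t       uResult = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);                // even bytes from dst
 *              uResult |= (uint64_t)((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);  // odd bytes from src
 *          }
 *          *puDst = uResult;
 *      }
 */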
3124
3125/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3126FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3127{
3128 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3129 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3130}
3131
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
3138
3139/* Opcode 0xf3 0x0f 0x60 - invalid */
3140
3141
3142/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3143FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3144{
3145 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3146 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3147}
3148
3149/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3150FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3151{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3153 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3154}
3155
3156/* Opcode 0xf3 0x0f 0x61 - invalid */
3157
3158
3159/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3160FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3161{
3162 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3163 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3164}
3165
3166/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3167FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3168{
3169 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3170 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3171}
3172
3173/* Opcode 0xf3 0x0f 0x62 - invalid */
3174
3175
3176
3177/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3178FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3179/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3180FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3181/* Opcode 0xf3 0x0f 0x63 - invalid */
3182
3183/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3184FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3185/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3186FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3187/* Opcode 0xf3 0x0f 0x64 - invalid */
3188
3189/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3190FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3191/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3192FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3193/* Opcode 0xf3 0x0f 0x65 - invalid */
3194
3195/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3196FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3197/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3198FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3199/* Opcode 0xf3 0x0f 0x66 - invalid */
3200
3201/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3202FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3203/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3204FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3205/* Opcode 0xf3 0x0f 0x67 - invalid */
3206
3207
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * Only the upper half of the 2nd operand is used; in the memory case this
 * is a 64-bit read.
 *
 * Exceptions type 4.
 */
3218FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3219{
3220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3221 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3223 {
3224 /*
3225 * Register, register.
3226 */
3227 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3228 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230 IEM_MC_BEGIN(2, 0);
3231 IEM_MC_ARG(uint64_t *, pDst, 0);
3232 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3233 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3234 IEM_MC_PREPARE_FPU_USAGE();
3235 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3236 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3237 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3238 IEM_MC_ADVANCE_RIP();
3239 IEM_MC_END();
3240 }
3241 else
3242 {
3243 /*
3244 * Register, memory.
3245 */
3246 IEM_MC_BEGIN(2, 2);
3247 IEM_MC_ARG(uint64_t *, pDst, 0);
3248 IEM_MC_LOCAL(uint64_t, uSrc);
3249 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3251
3252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3254 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3255 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3256
3257 IEM_MC_PREPARE_FPU_USAGE();
3258 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3259 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3260
3261 IEM_MC_ADVANCE_RIP();
3262 IEM_MC_END();
3263 }
3264 return VINF_SUCCESS;
3265}
3266
3267
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * Only the upper half of the 2nd operand is used; the memory access is
 * 128-bit aligned and may read the full 128 bits or just the upper 64.
 *
 * Exceptions type 4.
 */
3278FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3279{
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3282 {
3283 /*
3284 * Register, register.
3285 */
3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3287 IEM_MC_BEGIN(2, 0);
3288 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3289 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3290 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3291 IEM_MC_PREPARE_SSE_USAGE();
3292 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3293 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3294 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3295 IEM_MC_ADVANCE_RIP();
3296 IEM_MC_END();
3297 }
3298 else
3299 {
3300 /*
3301 * Register, memory.
3302 */
3303 IEM_MC_BEGIN(2, 2);
3304 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3305 IEM_MC_LOCAL(RTUINT128U, uSrc);
3306 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3308
3309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3311 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3313
3314 IEM_MC_PREPARE_SSE_USAGE();
3315 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3317
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 return VINF_SUCCESS;
3322}
3323
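/*
 * The high-high workers interleave the upper halves instead.  Host-side
 * sketch of punpckhbw on MMX (illustrative only; names are made up):
 *
 *      #include <stdint.h>
 *
 *      static void sketchPunpckhbw(uint64_t *puDst, uint64_t uSrc)
 *      {
 *          uint64_t const uDst    = *puDst;
 *          uint64_t       uResult = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);      // even bytes from dst high half
 *              uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8);  // odd bytes from src high half
 *          }
 *          *puDst = uResult;
 *      }
 */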
3324
3325/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3326FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3327{
3328 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3329 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3330}
3331
3332/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3333FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3334{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3336 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3337}
3338/* Opcode 0xf3 0x0f 0x68 - invalid */
3339
3340
3341/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3342FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3343{
3344 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3345 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3346}
3347
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3355/* Opcode 0xf3 0x0f 0x69 - invalid */
3356
3357
3358/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3359FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3360{
3361 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3362 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3363}
3364
3365/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3366FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3367{
3368 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3369 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3370}
3371/* Opcode 0xf3 0x0f 0x6a - invalid */
3372
3373
3374/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3375FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3376/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3377FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3378/* Opcode 0xf3 0x0f 0x6b - invalid */
3379
3380
3381/* Opcode 0x0f 0x6c - invalid */
3382
3383/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3384FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3385{
3386 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3387 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3388}
3389
3390/* Opcode 0xf3 0x0f 0x6c - invalid */
3391/* Opcode 0xf2 0x0f 0x6c - invalid */
3392
3393
3394/* Opcode 0x0f 0x6d - invalid */
3395
3396/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3397FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3398{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3400 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3401}
3402
3403/* Opcode 0xf3 0x0f 0x6d - invalid */
3404
3405
3406FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3407{
3408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3409 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3410 {
3411 /**
3412 * @opcode 0x6e
3413 * @opcodesub rex.w=1
3414 * @oppfx none
3415 * @opcpuid mmx
3416 * @opgroup og_mmx_datamove
3417 * @opxcpttype 5
3418 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3419 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3420 */
3421 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3422 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3423 {
3424 /* MMX, greg64 */
3425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3426 IEM_MC_BEGIN(0, 1);
3427 IEM_MC_LOCAL(uint64_t, u64Tmp);
3428
3429 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3430 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3431
3432 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3433 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3434 IEM_MC_FPU_TO_MMX_MODE();
3435
3436 IEM_MC_ADVANCE_RIP();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /* MMX, [mem64] */
3442 IEM_MC_BEGIN(0, 2);
3443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3444 IEM_MC_LOCAL(uint64_t, u64Tmp);
3445
3446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3448 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3449 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3450
3451 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3452 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3453 IEM_MC_FPU_TO_MMX_MODE();
3454
3455 IEM_MC_ADVANCE_RIP();
3456 IEM_MC_END();
3457 }
3458 }
3459 else
3460 {
3461 /**
3462 * @opdone
3463 * @opcode 0x6e
3464 * @opcodesub rex.w=0
3465 * @oppfx none
3466 * @opcpuid mmx
3467 * @opgroup og_mmx_datamove
3468 * @opxcpttype 5
3469 * @opfunction iemOp_movd_q_Pd_Ey
3470 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3471 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3472 */
3473 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3475 {
3476 /* MMX, greg */
3477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3478 IEM_MC_BEGIN(0, 1);
3479 IEM_MC_LOCAL(uint64_t, u64Tmp);
3480
3481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3482 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3483
3484 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3485 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3486 IEM_MC_FPU_TO_MMX_MODE();
3487
3488 IEM_MC_ADVANCE_RIP();
3489 IEM_MC_END();
3490 }
3491 else
3492 {
3493 /* MMX, [mem] */
3494 IEM_MC_BEGIN(0, 2);
3495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3496 IEM_MC_LOCAL(uint32_t, u32Tmp);
3497
3498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3500 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3502
3503 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3504 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3505 IEM_MC_FPU_TO_MMX_MODE();
3506
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 }
3510 }
3511 return VINF_SUCCESS;
3512}
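
/*
 * Note that the rex.w=0 form above zero-extends the 32-bit source into the
 * full 64-bit MMX register, and both forms leave the FPU tagged as being
 * in MMX mode (hence ftw=0xff in the @optest values).  Zero-extension
 * sketch (illustrative only; name is made up):
 *
 *      #include <stdint.h>
 *
 *      static uint64_t sketchMovdToMmx(uint32_t u32Src)
 *      {
 *          return u32Src; // implicit zero extension to 64 bits
 *      }
 */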
3513
3514FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3515{
3516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3517 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3518 {
3519 /**
3520 * @opcode 0x6e
3521 * @opcodesub rex.w=1
3522 * @oppfx 0x66
3523 * @opcpuid sse2
3524 * @opgroup og_sse2_simdint_datamove
3525 * @opxcpttype 5
3526 * @optest 64-bit / op1=1 op2=2 -> op1=2
3527 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3528 */
3529 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3531 {
3532 /* XMM, greg64 */
3533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3534 IEM_MC_BEGIN(0, 1);
3535 IEM_MC_LOCAL(uint64_t, u64Tmp);
3536
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539
3540 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3541 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3542
3543 IEM_MC_ADVANCE_RIP();
3544 IEM_MC_END();
3545 }
3546 else
3547 {
3548 /* XMM, [mem64] */
3549 IEM_MC_BEGIN(0, 2);
3550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3551 IEM_MC_LOCAL(uint64_t, u64Tmp);
3552
3553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3557
3558 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3559 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3560
3561 IEM_MC_ADVANCE_RIP();
3562 IEM_MC_END();
3563 }
3564 }
3565 else
3566 {
3567 /**
3568 * @opdone
3569 * @opcode 0x6e
3570 * @opcodesub rex.w=0
3571 * @oppfx 0x66
3572 * @opcpuid sse2
3573 * @opgroup og_sse2_simdint_datamove
3574 * @opxcpttype 5
3575 * @opfunction iemOp_movd_q_Vy_Ey
3576 * @optest op1=1 op2=2 -> op1=2
3577 * @optest op1=0 op2=-42 -> op1=-42
3578 */
3579 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3581 {
3582 /* XMM, greg32 */
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584 IEM_MC_BEGIN(0, 1);
3585 IEM_MC_LOCAL(uint32_t, u32Tmp);
3586
3587 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3589
3590 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3591 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3592
3593 IEM_MC_ADVANCE_RIP();
3594 IEM_MC_END();
3595 }
3596 else
3597 {
3598 /* XMM, [mem32] */
3599 IEM_MC_BEGIN(0, 2);
3600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3601 IEM_MC_LOCAL(uint32_t, u32Tmp);
3602
3603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3607
3608 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3609 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3610
3611 IEM_MC_ADVANCE_RIP();
3612 IEM_MC_END();
3613 }
3614 }
3615 return VINF_SUCCESS;
3616}
3617
3618/* Opcode 0xf3 0x0f 0x6e - invalid */
3619
3620
3621/**
3622 * @opcode 0x6f
3623 * @oppfx none
3624 * @opcpuid mmx
3625 * @opgroup og_mmx_datamove
3626 * @opxcpttype 5
3627 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3628 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3629 */
3630FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3631{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3635 {
3636 /*
3637 * Register, register.
3638 */
3639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3640 IEM_MC_BEGIN(0, 1);
3641 IEM_MC_LOCAL(uint64_t, u64Tmp);
3642
3643 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3644 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3645
3646 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3647 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3648 IEM_MC_FPU_TO_MMX_MODE();
3649
3650 IEM_MC_ADVANCE_RIP();
3651 IEM_MC_END();
3652 }
3653 else
3654 {
3655 /*
3656 * Register, memory.
3657 */
3658 IEM_MC_BEGIN(0, 2);
3659 IEM_MC_LOCAL(uint64_t, u64Tmp);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3661
3662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3664 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3665 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3666
3667 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3668 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3669 IEM_MC_FPU_TO_MMX_MODE();
3670
3671 IEM_MC_ADVANCE_RIP();
3672 IEM_MC_END();
3673 }
3674 return VINF_SUCCESS;
3675}
3676
3677/**
3678 * @opcode 0x6f
3679 * @oppfx 0x66
3680 * @opcpuid sse2
3681 * @opgroup og_sse2_simdint_datamove
3682 * @opxcpttype 1
3683 * @optest op1=1 op2=2 -> op1=2
3684 * @optest op1=0 op2=-42 -> op1=-42
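 * @remarks Editorial note, not upstream documentation: the memory path below
 *          fetches with IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so an unaligned
 *          128-bit source operand faults, matching MOVDQA semantics.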
3685 */
3686FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3687{
3688 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3691 {
3692 /*
3693 * Register, register.
3694 */
3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696 IEM_MC_BEGIN(0, 0);
3697
3698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3700
3701 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3702 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(0, 2);
3712 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714
3715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3719
3720 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3721 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3722
3723 IEM_MC_ADVANCE_RIP();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729/**
3730 * @opcode 0x6f
3731 * @oppfx 0xf3
3732 * @opcpuid sse2
3733 * @opgroup og_sse2_simdint_datamove
3734 * @opxcpttype 4UA
3735 * @optest op1=1 op2=2 -> op1=2
3736 * @optest op1=0 op2=-42 -> op1=-42
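 * @remarks Editorial note, not upstream documentation: unlike movdqa above,
 *          the memory path uses the plain IEM_MC_FETCH_MEM_U128, i.e. no
 *          16-byte alignment restriction, matching MOVDQU semantics.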
3737 */
3738FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3739{
3740 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3741 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3743 {
3744 /*
3745 * Register, register.
3746 */
3747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3748 IEM_MC_BEGIN(0, 0);
3749 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3750 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3751 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3752 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3753 IEM_MC_ADVANCE_RIP();
3754 IEM_MC_END();
3755 }
3756 else
3757 {
3758 /*
3759 * Register, memory.
3760 */
3761 IEM_MC_BEGIN(0, 2);
3762 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3764
3765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3767 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3768 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3769 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3770 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3771
3772 IEM_MC_ADVANCE_RIP();
3773 IEM_MC_END();
3774 }
3775 return VINF_SUCCESS;
3776}
3777
3778
3779/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
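/* Editorial sketch of the imm8 ("bEvil") semantics for the shuffle family:
 * each 2-bit field of the immediate selects one source word lane,
 *     dst.w[i] = src.w[(bEvil >> (2 * i)) & 3]     for i = 0..3
 * e.g. bEvil = 0x1B (0b00011011) reverses the four words. */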
3780FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3781{
3782 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3785 {
3786 /*
3787 * Register, register.
3788 */
3789 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3791
3792 IEM_MC_BEGIN(3, 0);
3793 IEM_MC_ARG(uint64_t *, pDst, 0);
3794 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3795 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3796 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3797 IEM_MC_PREPARE_FPU_USAGE();
3798 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3799 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3800 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3801 IEM_MC_ADVANCE_RIP();
3802 IEM_MC_END();
3803 }
3804 else
3805 {
3806 /*
3807 * Register, memory.
3808 */
3809 IEM_MC_BEGIN(3, 2);
3810 IEM_MC_ARG(uint64_t *, pDst, 0);
3811 IEM_MC_LOCAL(uint64_t, uSrc);
3812 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3814
3815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3816 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3817 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3820
3821 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3822 IEM_MC_PREPARE_FPU_USAGE();
3823 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3824 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3825
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 }
3829 return VINF_SUCCESS;
3830}
3831
3832/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3833FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3834{
3835 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3837 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3838 {
3839 /*
3840 * Register, register.
3841 */
3842 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3844
3845 IEM_MC_BEGIN(3, 0);
3846 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3847 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3848 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3849 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3850 IEM_MC_PREPARE_SSE_USAGE();
3851 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3852 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3853 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3854 IEM_MC_ADVANCE_RIP();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 /*
3860 * Register, memory.
3861 */
3862 IEM_MC_BEGIN(3, 2);
3863 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3864 IEM_MC_LOCAL(RTUINT128U, uSrc);
3865 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3867
3868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3869 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3870 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3872 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3873
3874 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3875 IEM_MC_PREPARE_SSE_USAGE();
3876 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3877 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3878
3879 IEM_MC_ADVANCE_RIP();
3880 IEM_MC_END();
3881 }
3882 return VINF_SUCCESS;
3883}
3884
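/* Editorial note: pshufd applies the same 2-bit lane selection as pshufw,
 * just to the four dwords of an XMM register, while pshufhw/pshuflw below
 * shuffle only the words of the high/low quadword and copy the other
 * quadword through unchanged (see the iemAImpl_* helpers). */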
3885/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3886FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3887{
3888 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3891 {
3892 /*
3893 * Register, register.
3894 */
3895 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(3, 0);
3899 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3900 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3901 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3902 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3903 IEM_MC_PREPARE_SSE_USAGE();
3904 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3905 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3906 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3907 IEM_MC_ADVANCE_RIP();
3908 IEM_MC_END();
3909 }
3910 else
3911 {
3912 /*
3913 * Register, memory.
3914 */
3915 IEM_MC_BEGIN(3, 2);
3916 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3917 IEM_MC_LOCAL(RTUINT128U, uSrc);
3918 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3920
3921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3922 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3923 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3926
3927 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3928 IEM_MC_PREPARE_SSE_USAGE();
3929 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3930 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3931
3932 IEM_MC_ADVANCE_RIP();
3933 IEM_MC_END();
3934 }
3935 return VINF_SUCCESS;
3936}
3937
3938/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3939FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3940{
3941 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3944 {
3945 /*
3946 * Register, register.
3947 */
3948 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3950
3951 IEM_MC_BEGIN(3, 0);
3952 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3953 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3954 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3956 IEM_MC_PREPARE_SSE_USAGE();
3957 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3958 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3959 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3960 IEM_MC_ADVANCE_RIP();
3961 IEM_MC_END();
3962 }
3963 else
3964 {
3965 /*
3966 * Register, memory.
3967 */
3968 IEM_MC_BEGIN(3, 2);
3969 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3970 IEM_MC_LOCAL(RTUINT128U, uSrc);
3971 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3973
3974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3975 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3976 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3978 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3979
3980 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3981 IEM_MC_PREPARE_SSE_USAGE();
3982 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3983 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3984
3985 IEM_MC_ADVANCE_RIP();
3986 IEM_MC_END();
3987 }
3988 return VINF_SUCCESS;
3989}
3990
3991
3992/** Opcode 0x0f 0x71 11/2. */
3993FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3994
3995/** Opcode 0x66 0x0f 0x71 11/2. */
3996FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3997
3998/** Opcode 0x0f 0x71 11/4. */
3999FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4000
4001/** Opcode 0x66 0x0f 0x71 11/4. */
4002FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4003
4004/** Opcode 0x0f 0x71 11/6. */
4005FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4006
4007/** Opcode 0x66 0x0f 0x71 11/6. */
4008FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4009
4010
4011/**
4012 * Group 12 jump table for register variant.
4013 */
4014IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4015{
4016 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4017 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4018 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4019 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4020 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4021 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4022 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4023 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4024};
4025AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4026
4027
4028/** Opcode 0x0f 0x71. */
4029FNIEMOP_DEF(iemOp_Grp12)
4030{
4031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4032 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4033 /* register, register */
4034 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4035 + pVCpu->iem.s.idxPrefix], bRm);
4036 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4037}
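/* Editorial sketch of the dispatch arithmetic above: idxPrefix is 0 for no
 * prefix, 1 for 0x66, 2 for 0xF3 and 3 for 0xF2, matching the four table
 * columns.  So 66 0F 71 /2 ib, i.e. psrlw xmm, imm8, resolves to index
 * 2*4 + 1 = 9, which is iemOp_Grp12_psrlw_Ux_Ib in g_apfnGroup12RegReg.
 * Groups 13 and 14 below follow the same scheme. */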
4038
4039
4040/** Opcode 0x0f 0x72 11/2. */
4041FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4042
4043/** Opcode 0x66 0x0f 0x72 11/2. */
4044FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4045
4046/** Opcode 0x0f 0x72 11/4. */
4047FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4048
4049/** Opcode 0x66 0x0f 0x72 11/4. */
4050FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4051
4052/** Opcode 0x0f 0x72 11/6. */
4053FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4054
4055/** Opcode 0x66 0x0f 0x72 11/6. */
4056FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4057
4058
4059/**
4060 * Group 13 jump table for register variant.
4061 */
4062IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4063{
4064 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4065 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4066 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4067 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4068 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4069 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4070 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4071 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4072};
4073AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4074
4075/** Opcode 0x0f 0x72. */
4076FNIEMOP_DEF(iemOp_Grp13)
4077{
4078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4080 /* register, register */
4081 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4082 + pVCpu->iem.s.idxPrefix], bRm);
4083 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4084}
4085
4086
4087/** Opcode 0x0f 0x73 11/2. */
4088FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4089
4090/** Opcode 0x66 0x0f 0x73 11/2. */
4091FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4092
4093/** Opcode 0x66 0x0f 0x73 11/3. */
4094FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4095
4096/** Opcode 0x0f 0x73 11/6. */
4097FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4098
4099/** Opcode 0x66 0x0f 0x73 11/6. */
4100FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4101
4102/** Opcode 0x66 0x0f 0x73 11/7. */
4103FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4104
4105/**
4106 * Group 14 jump table for register variant.
4107 */
4108IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4109{
4110 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4111 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4112 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4113 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4114 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4115 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4116 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4117 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4118};
4119AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4120
4121
4122/** Opcode 0x0f 0x73. */
4123FNIEMOP_DEF(iemOp_Grp14)
4124{
4125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4127 /* register, register */
4128 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4129 + pVCpu->iem.s.idxPrefix], bRm);
4130 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4131}
4132
4133
4134/**
4135 * Common worker for MMX instructions on the form:
4136 * pxxx mm1, mm2/mem64
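 *
 * @remarks Editorial note: callers pass a PCIEMOPMEDIAF2 such as
 *          &g_iemAImpl_pcmpeqb (see the pcmpeqb/pcmpeqw/pcmpeqd wrappers
 *          below) and the worker invokes pImpl->pfnU64 on the two operands.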
4137 */
4138FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4139{
4140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4142 {
4143 /*
4144 * Register, register.
4145 */
4146 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4147 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4149 IEM_MC_BEGIN(2, 0);
4150 IEM_MC_ARG(uint64_t *, pDst, 0);
4151 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4152 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4153 IEM_MC_PREPARE_FPU_USAGE();
4154 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4155 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4156 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4157 IEM_MC_ADVANCE_RIP();
4158 IEM_MC_END();
4159 }
4160 else
4161 {
4162 /*
4163 * Register, memory.
4164 */
4165 IEM_MC_BEGIN(2, 2);
4166 IEM_MC_ARG(uint64_t *, pDst, 0);
4167 IEM_MC_LOCAL(uint64_t, uSrc);
4168 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4170
4171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4173 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4174 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4175
4176 IEM_MC_PREPARE_FPU_USAGE();
4177 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4178 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4179
4180 IEM_MC_ADVANCE_RIP();
4181 IEM_MC_END();
4182 }
4183 return VINF_SUCCESS;
4184}
4185
4186
4187/**
4188 * Common worker for SSE2 instructions on the forms:
4189 * pxxx xmm1, xmm2/mem128
4190 *
4191 * Proper alignment of the 128-bit operand is enforced.
4192 * Exceptions type 4. SSE2 cpuid checks.
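 *
 * @remarks Editorial note: the memory path fetches with
 *          IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which is what enforces the
 *          alignment mentioned above; both paths end in pImpl->pfnU128.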
4193 */
4194FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4195{
4196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4198 {
4199 /*
4200 * Register, register.
4201 */
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4203 IEM_MC_BEGIN(2, 0);
4204 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4205 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4206 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4207 IEM_MC_PREPARE_SSE_USAGE();
4208 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4209 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4210 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4211 IEM_MC_ADVANCE_RIP();
4212 IEM_MC_END();
4213 }
4214 else
4215 {
4216 /*
4217 * Register, memory.
4218 */
4219 IEM_MC_BEGIN(2, 2);
4220 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4221 IEM_MC_LOCAL(RTUINT128U, uSrc);
4222 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4224
4225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4228 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4229
4230 IEM_MC_PREPARE_SSE_USAGE();
4231 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4232 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4233
4234 IEM_MC_ADVANCE_RIP();
4235 IEM_MC_END();
4236 }
4237 return VINF_SUCCESS;
4238}
4239
4240
4241/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4242FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4243{
4244 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4245 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4246}
4247
4248/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4249FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4250{
4251 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4252 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4253}
4254
4255/* Opcode 0xf3 0x0f 0x74 - invalid */
4256/* Opcode 0xf2 0x0f 0x74 - invalid */
4257
4258
4259/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4260FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4261{
4262 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4263 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4264}
4265
4266/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4267FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4268{
4269 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4270 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4271}
4272
4273/* Opcode 0xf3 0x0f 0x75 - invalid */
4274/* Opcode 0xf2 0x0f 0x75 - invalid */
4275
4276
4277/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4278FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4279{
4280 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4281 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4282}
4283
4284/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4285FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4286{
4287 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4288 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4289}
4290
4291/* Opcode 0xf3 0x0f 0x76 - invalid */
4292/* Opcode 0xf2 0x0f 0x76 - invalid */
4293
4294
4295/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
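/* Editorial note: emms takes no operands; after the device-not-available and
 * pending-x87-exception checks it leaves MMX mode via IEM_MC_FPU_FROM_MMX_MODE,
 * which per the MMX programming model corresponds to marking the whole x87
 * register stack empty again. */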
4296FNIEMOP_DEF(iemOp_emms)
4297{
4298 IEMOP_MNEMONIC(emms, "emms");
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4303 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4304 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4305 IEM_MC_FPU_FROM_MMX_MODE();
4306 IEM_MC_ADVANCE_RIP();
4307 IEM_MC_END();
4308 return VINF_SUCCESS;
4309}
4310
4311/* Opcode 0x66 0x0f 0x77 - invalid */
4312/* Opcode 0xf3 0x0f 0x77 - invalid */
4313/* Opcode 0xf2 0x0f 0x77 - invalid */
4314
4315/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4316#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4317FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4318{
4319 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4320 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4321 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
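    /* Editorial note: per the VMX spec the VMREAD/VMWRITE operand size is
       fixed at 64 bits in 64-bit mode and 32 bits elsewhere; prefixes do not
       change it, hence the fixed selection below. */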
4322 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4323
4324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4326 {
4327 /*
4328 * Register, register.
4329 */
4330 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4331 if (enmEffOpSize == IEMMODE_64BIT)
4332 {
4333 IEM_MC_BEGIN(2, 0);
4334 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4335 IEM_MC_ARG(uint64_t, u64Enc, 1);
4336 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4337 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4338 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
4339 IEM_MC_END();
4340 }
4341 else
4342 {
4343 IEM_MC_BEGIN(2, 0);
4344 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4345 IEM_MC_ARG(uint32_t, u32Enc, 1);
4346 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4347 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4348 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
4349 IEM_MC_END();
4350 }
4351 }
4352 else
4353 {
4354 /*
4355 * Memory, register.
4356 */
4357 if (enmEffOpSize == IEMMODE_64BIT)
4358 {
4359 IEM_MC_BEGIN(3, 0);
4360 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4361 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4362 IEM_MC_ARG(uint64_t, u64Enc, 2);
4363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4364 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4365 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4366 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4367 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
4368 IEM_MC_END();
4369 }
4370 else
4371 {
4372 IEM_MC_BEGIN(3, 0);
4373 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4374 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4375 IEM_MC_ARG(uint32_t, u32Enc, 2);
4376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4377 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4378 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4379 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4380 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
4381 IEM_MC_END();
4382 }
4383 }
4384 return VINF_SUCCESS;
4385}
4386#else
4387FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4388#endif
4389
4390/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4391FNIEMOP_STUB(iemOp_AmdGrp17);
4392/* Opcode 0xf3 0x0f 0x78 - invalid */
4393/* Opcode 0xf2 0x0f 0x78 - invalid */
4394
4395/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4396#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4397FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4398{
4399 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4400 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4401 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4402 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4403
4404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4405 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4406 {
4407 /*
4408 * Register, register.
4409 */
4410 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4411 if (enmEffOpSize == IEMMODE_64BIT)
4412 {
4413 IEM_MC_BEGIN(2, 0);
4414 IEM_MC_ARG(uint64_t, u64Val, 0);
4415 IEM_MC_ARG(uint64_t, u64Enc, 1);
4416 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4417 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4418 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4419 IEM_MC_END();
4420 }
4421 else
4422 {
4423 IEM_MC_BEGIN(2, 0);
4424 IEM_MC_ARG(uint32_t, u32Val, 0);
4425 IEM_MC_ARG(uint32_t, u32Enc, 1);
4426 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4427 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4428 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4429 IEM_MC_END();
4430 }
4431 }
4432 else
4433 {
4434 /*
4435 * Register, memory.
4436 */
4437 if (enmEffOpSize == IEMMODE_64BIT)
4438 {
4439 IEM_MC_BEGIN(3, 0);
4440 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4441 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4442 IEM_MC_ARG(uint64_t, u64Enc, 2);
4443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4444 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4445 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4446 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4447 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
4448 IEM_MC_END();
4449 }
4450 else
4451 {
4452 IEM_MC_BEGIN(3, 0);
4453 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4454 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4455 IEM_MC_ARG(uint32_t, u32Enc, 2);
4456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4457 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4458 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4459 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4460 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
4461 IEM_MC_END();
4462 }
4463 }
4464 return VINF_SUCCESS;
4465}
4466#else
4467FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4468#endif
4469/* Opcode 0x66 0x0f 0x79 - invalid */
4470/* Opcode 0xf3 0x0f 0x79 - invalid */
4471/* Opcode 0xf2 0x0f 0x79 - invalid */
4472
4473/* Opcode 0x0f 0x7a - invalid */
4474/* Opcode 0x66 0x0f 0x7a - invalid */
4475/* Opcode 0xf3 0x0f 0x7a - invalid */
4476/* Opcode 0xf2 0x0f 0x7a - invalid */
4477
4478/* Opcode 0x0f 0x7b - invalid */
4479/* Opcode 0x66 0x0f 0x7b - invalid */
4480/* Opcode 0xf3 0x0f 0x7b - invalid */
4481/* Opcode 0xf2 0x0f 0x7b - invalid */
4482
4483/* Opcode 0x0f 0x7c - invalid */
4484/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4485FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4486/* Opcode 0xf3 0x0f 0x7c - invalid */
4487/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4488FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4489
4490/* Opcode 0x0f 0x7d - invalid */
4491/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4492FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4493/* Opcode 0xf3 0x0f 0x7d - invalid */
4494/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4495FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4496
4497
4498/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4499FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4500{
4501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4503 {
4504 /**
4505 * @opcode 0x7e
4506 * @opcodesub rex.w=1
4507 * @oppfx none
4508 * @opcpuid mmx
4509 * @opgroup og_mmx_datamove
4510 * @opxcpttype 5
4511 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4512 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4513 */
4514 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4516 {
4517 /* greg64, MMX */
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4519 IEM_MC_BEGIN(0, 1);
4520 IEM_MC_LOCAL(uint64_t, u64Tmp);
4521
4522 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4523 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4524
4525 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4526 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4527 IEM_MC_FPU_TO_MMX_MODE();
4528
4529 IEM_MC_ADVANCE_RIP();
4530 IEM_MC_END();
4531 }
4532 else
4533 {
4534 /* [mem64], MMX */
4535 IEM_MC_BEGIN(0, 2);
4536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4537 IEM_MC_LOCAL(uint64_t, u64Tmp);
4538
4539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4541 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4542 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4543
4544 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4545 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4546 IEM_MC_FPU_TO_MMX_MODE();
4547
4548 IEM_MC_ADVANCE_RIP();
4549 IEM_MC_END();
4550 }
4551 }
4552 else
4553 {
4554 /**
4555 * @opdone
4556 * @opcode 0x7e
4557 * @opcodesub rex.w=0
4558 * @oppfx none
4559 * @opcpuid mmx
4560 * @opgroup og_mmx_datamove
4561 * @opxcpttype 5
4562 * @opfunction iemOp_movd_q_Ey_Pd
4563 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4564 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4565 */
4566 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4568 {
4569 /* greg32, MMX */
4570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4571 IEM_MC_BEGIN(0, 1);
4572 IEM_MC_LOCAL(uint32_t, u32Tmp);
4573
4574 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4575 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4576
4577 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4578 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4579 IEM_MC_FPU_TO_MMX_MODE();
4580
4581 IEM_MC_ADVANCE_RIP();
4582 IEM_MC_END();
4583 }
4584 else
4585 {
4586 /* [mem32], MMX */
4587 IEM_MC_BEGIN(0, 2);
4588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4589 IEM_MC_LOCAL(uint32_t, u32Tmp);
4590
4591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4593 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4595
4596 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4597 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4598 IEM_MC_FPU_TO_MMX_MODE();
4599
4600 IEM_MC_ADVANCE_RIP();
4601 IEM_MC_END();
4602 }
4603 }
4604 return VINF_SUCCESS;
4605
4606}
4607
4608
4609FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4610{
4611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4612 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4613 {
4614 /**
4615 * @opcode 0x7e
4616 * @opcodesub rex.w=1
4617 * @oppfx 0x66
4618 * @opcpuid sse2
4619 * @opgroup og_sse2_simdint_datamove
4620 * @opxcpttype 5
4621 * @optest 64-bit / op1=1 op2=2 -> op1=2
4622 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4623 */
4624 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4626 {
4627 /* greg64, XMM */
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_BEGIN(0, 1);
4630 IEM_MC_LOCAL(uint64_t, u64Tmp);
4631
4632 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4634
4635 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4636 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4637
4638 IEM_MC_ADVANCE_RIP();
4639 IEM_MC_END();
4640 }
4641 else
4642 {
4643 /* [mem64], XMM */
4644 IEM_MC_BEGIN(0, 2);
4645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4646 IEM_MC_LOCAL(uint64_t, u64Tmp);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4652
4653 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4654 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4655
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 }
4660 else
4661 {
4662 /**
4663 * @opdone
4664 * @opcode 0x7e
4665 * @opcodesub rex.w=0
4666 * @oppfx 0x66
4667 * @opcpuid sse2
4668 * @opgroup og_sse2_simdint_datamove
4669 * @opxcpttype 5
4670 * @opfunction iemOp_movd_q_Ey_Vy
4671 * @optest op1=1 op2=2 -> op1=2
4672 * @optest op1=0 op2=-42 -> op1=-42
4673 */
4674 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4675 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4676 {
4677 /* greg32, XMM */
4678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4679 IEM_MC_BEGIN(0, 1);
4680 IEM_MC_LOCAL(uint32_t, u32Tmp);
4681
4682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4684
4685 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4686 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4687
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 }
4691 else
4692 {
4693 /* [mem32], XMM */
4694 IEM_MC_BEGIN(0, 2);
4695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4696 IEM_MC_LOCAL(uint32_t, u32Tmp);
4697
4698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4700 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4701 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4702
4703 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4704 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4705
4706 IEM_MC_ADVANCE_RIP();
4707 IEM_MC_END();
4708 }
4709 }
4710 return VINF_SUCCESS;
4711
4712}
4713
4714/**
4715 * @opcode 0x7e
4716 * @oppfx 0xf3
4717 * @opcpuid sse2
4718 * @opgroup og_sse2_pcksclr_datamove
4719 * @opxcpttype none
4720 * @optest op1=1 op2=2 -> op1=2
4721 * @optest op1=0 op2=-42 -> op1=-42
4722 */
4723FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4724{
4725 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4726 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4727 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4728 {
4729 /*
4730 * Register, register.
4731 */
4732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4733 IEM_MC_BEGIN(0, 2);
4734 IEM_MC_LOCAL(uint64_t, uSrc);
4735
4736 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4737 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4738
4739 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4740 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4741
4742 IEM_MC_ADVANCE_RIP();
4743 IEM_MC_END();
4744 }
4745 else
4746 {
4747 /*
4748 * Register, memory.
4749 */
4750 IEM_MC_BEGIN(0, 2);
4751 IEM_MC_LOCAL(uint64_t, uSrc);
4752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4753
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4757 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4758
4759 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4760 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4761
4762 IEM_MC_ADVANCE_RIP();
4763 IEM_MC_END();
4764 }
4765 return VINF_SUCCESS;
4766}
4767
4768/* Opcode 0xf2 0x0f 0x7e - invalid */
4769
4770
4771/** Opcode 0x0f 0x7f - movq Qq, Pq */
4772FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4773{
4774 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4776 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4777 {
4778 /*
4779 * Register, register.
4780 */
4781 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4782 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint64_t, u64Tmp);
4786 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4787 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4788 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4789 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4790 IEM_MC_ADVANCE_RIP();
4791 IEM_MC_END();
4792 }
4793 else
4794 {
4795 /*
4796 * Memory, register.
4797 */
4798 IEM_MC_BEGIN(0, 2);
4799 IEM_MC_LOCAL(uint64_t, u64Tmp);
4800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4801
4802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4805 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4806
4807 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4808 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4809
4810 IEM_MC_ADVANCE_RIP();
4811 IEM_MC_END();
4812 }
4813 return VINF_SUCCESS;
4814}
4815
4816/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4817FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4818{
4819 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4821 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4822 {
4823 /*
4824 * Register, register.
4825 */
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827 IEM_MC_BEGIN(0, 0);
4828 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4829 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4830 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4831 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4832 IEM_MC_ADVANCE_RIP();
4833 IEM_MC_END();
4834 }
4835 else
4836 {
4837 /*
4838 * Memory, register.
4839 */
4840 IEM_MC_BEGIN(0, 2);
4841 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4843
4844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4848
4849 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4850 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4851
4852 IEM_MC_ADVANCE_RIP();
4853 IEM_MC_END();
4854 }
4855 return VINF_SUCCESS;
4856}
4857
4858/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4859FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4860{
4861 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4864 {
4865 /*
4866 * Register, register.
4867 */
4868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4869 IEM_MC_BEGIN(0, 0);
4870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4871 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4872 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4873 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_ADVANCE_RIP();
4875 IEM_MC_END();
4876 }
4877 else
4878 {
4879 /*
4880 * Memory, register.
4881 */
4882 IEM_MC_BEGIN(0, 2);
4883 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4885
4886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4888 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4889 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4890
4891 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4892 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4893
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896 }
4897 return VINF_SUCCESS;
4898}
4899
4900/* Opcode 0xf2 0x0f 0x7f - invalid */
4901
4902
4903
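/* Editorial summary of the long-form Jcc handlers (0x0F 0x80..0x8F) below:
 * each fetches a signed 16-bit displacement when the effective operand size
 * is 16 bits and a signed 32-bit one otherwise (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE
 * sends 64-bit mode down the 32-bit path), then tests EFLAGS: jo: OF, jc: CF,
 * je: ZF, jbe: CF|ZF, js: SF, jp: PF, jl: SF != OF, jle: ZF || SF != OF;
 * the jnX variants simply swap the jump and fall-through arms. */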
4904/** Opcode 0x0f 0x80. */
4905FNIEMOP_DEF(iemOp_jo_Jv)
4906{
4907 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4908 IEMOP_HLP_MIN_386();
4909 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4910 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4911 {
4912 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4914
4915 IEM_MC_BEGIN(0, 0);
4916 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4917 IEM_MC_REL_JMP_S16(i16Imm);
4918 } IEM_MC_ELSE() {
4919 IEM_MC_ADVANCE_RIP();
4920 } IEM_MC_ENDIF();
4921 IEM_MC_END();
4922 }
4923 else
4924 {
4925 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4927
4928 IEM_MC_BEGIN(0, 0);
4929 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4930 IEM_MC_REL_JMP_S32(i32Imm);
4931 } IEM_MC_ELSE() {
4932 IEM_MC_ADVANCE_RIP();
4933 } IEM_MC_ENDIF();
4934 IEM_MC_END();
4935 }
4936 return VINF_SUCCESS;
4937}
4938
4939
4940/** Opcode 0x0f 0x81. */
4941FNIEMOP_DEF(iemOp_jno_Jv)
4942{
4943 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4944 IEMOP_HLP_MIN_386();
4945 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4946 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4947 {
4948 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4950
4951 IEM_MC_BEGIN(0, 0);
4952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4953 IEM_MC_ADVANCE_RIP();
4954 } IEM_MC_ELSE() {
4955 IEM_MC_REL_JMP_S16(i16Imm);
4956 } IEM_MC_ENDIF();
4957 IEM_MC_END();
4958 }
4959 else
4960 {
4961 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4963
4964 IEM_MC_BEGIN(0, 0);
4965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4966 IEM_MC_ADVANCE_RIP();
4967 } IEM_MC_ELSE() {
4968 IEM_MC_REL_JMP_S32(i32Imm);
4969 } IEM_MC_ENDIF();
4970 IEM_MC_END();
4971 }
4972 return VINF_SUCCESS;
4973}
4974
4975
4976/** Opcode 0x0f 0x82. */
4977FNIEMOP_DEF(iemOp_jc_Jv)
4978{
4979 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4980 IEMOP_HLP_MIN_386();
4981 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4982 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4983 {
4984 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4986
4987 IEM_MC_BEGIN(0, 0);
4988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4989 IEM_MC_REL_JMP_S16(i16Imm);
4990 } IEM_MC_ELSE() {
4991 IEM_MC_ADVANCE_RIP();
4992 } IEM_MC_ENDIF();
4993 IEM_MC_END();
4994 }
4995 else
4996 {
4997 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4999
5000 IEM_MC_BEGIN(0, 0);
5001 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5002 IEM_MC_REL_JMP_S32(i32Imm);
5003 } IEM_MC_ELSE() {
5004 IEM_MC_ADVANCE_RIP();
5005 } IEM_MC_ENDIF();
5006 IEM_MC_END();
5007 }
5008 return VINF_SUCCESS;
5009}
5010
5011
5012/** Opcode 0x0f 0x83. */
5013FNIEMOP_DEF(iemOp_jnc_Jv)
5014{
5015 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5016 IEMOP_HLP_MIN_386();
5017 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5018 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5019 {
5020 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5022
5023 IEM_MC_BEGIN(0, 0);
5024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5025 IEM_MC_ADVANCE_RIP();
5026 } IEM_MC_ELSE() {
5027 IEM_MC_REL_JMP_S16(i16Imm);
5028 } IEM_MC_ENDIF();
5029 IEM_MC_END();
5030 }
5031 else
5032 {
5033 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035
5036 IEM_MC_BEGIN(0, 0);
5037 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5038 IEM_MC_ADVANCE_RIP();
5039 } IEM_MC_ELSE() {
5040 IEM_MC_REL_JMP_S32(i32Imm);
5041 } IEM_MC_ENDIF();
5042 IEM_MC_END();
5043 }
5044 return VINF_SUCCESS;
5045}
5046
5047
5048/** Opcode 0x0f 0x84. */
5049FNIEMOP_DEF(iemOp_je_Jv)
5050{
5051 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5052 IEMOP_HLP_MIN_386();
5053 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5055 {
5056 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5058
5059 IEM_MC_BEGIN(0, 0);
5060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5061 IEM_MC_REL_JMP_S16(i16Imm);
5062 } IEM_MC_ELSE() {
5063 IEM_MC_ADVANCE_RIP();
5064 } IEM_MC_ENDIF();
5065 IEM_MC_END();
5066 }
5067 else
5068 {
5069 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5071
5072 IEM_MC_BEGIN(0, 0);
5073 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5074 IEM_MC_REL_JMP_S32(i32Imm);
5075 } IEM_MC_ELSE() {
5076 IEM_MC_ADVANCE_RIP();
5077 } IEM_MC_ENDIF();
5078 IEM_MC_END();
5079 }
5080 return VINF_SUCCESS;
5081}
5082
5083
5084/** Opcode 0x0f 0x85. */
5085FNIEMOP_DEF(iemOp_jne_Jv)
5086{
5087 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5088 IEMOP_HLP_MIN_386();
5089 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5090 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5091 {
5092 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5094
5095 IEM_MC_BEGIN(0, 0);
5096 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5097 IEM_MC_ADVANCE_RIP();
5098 } IEM_MC_ELSE() {
5099 IEM_MC_REL_JMP_S16(i16Imm);
5100 } IEM_MC_ENDIF();
5101 IEM_MC_END();
5102 }
5103 else
5104 {
5105 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5107
5108 IEM_MC_BEGIN(0, 0);
5109 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5110 IEM_MC_ADVANCE_RIP();
5111 } IEM_MC_ELSE() {
5112 IEM_MC_REL_JMP_S32(i32Imm);
5113 } IEM_MC_ENDIF();
5114 IEM_MC_END();
5115 }
5116 return VINF_SUCCESS;
5117}
5118
5119
5120/** Opcode 0x0f 0x86. */
5121FNIEMOP_DEF(iemOp_jbe_Jv)
5122{
5123 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5124 IEMOP_HLP_MIN_386();
5125 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5126 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5127 {
5128 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5130
5131 IEM_MC_BEGIN(0, 0);
5132 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5133 IEM_MC_REL_JMP_S16(i16Imm);
5134 } IEM_MC_ELSE() {
5135 IEM_MC_ADVANCE_RIP();
5136 } IEM_MC_ENDIF();
5137 IEM_MC_END();
5138 }
5139 else
5140 {
5141 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5143
5144 IEM_MC_BEGIN(0, 0);
5145 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5146 IEM_MC_REL_JMP_S32(i32Imm);
5147 } IEM_MC_ELSE() {
5148 IEM_MC_ADVANCE_RIP();
5149 } IEM_MC_ENDIF();
5150 IEM_MC_END();
5151 }
5152 return VINF_SUCCESS;
5153}
5154
5155
5156/** Opcode 0x0f 0x87. */
5157FNIEMOP_DEF(iemOp_jnbe_Jv)
5158{
5159 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5160 IEMOP_HLP_MIN_386();
5161 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5162 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5163 {
5164 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5166
5167 IEM_MC_BEGIN(0, 0);
5168 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5169 IEM_MC_ADVANCE_RIP();
5170 } IEM_MC_ELSE() {
5171 IEM_MC_REL_JMP_S16(i16Imm);
5172 } IEM_MC_ENDIF();
5173 IEM_MC_END();
5174 }
5175 else
5176 {
5177 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179
5180 IEM_MC_BEGIN(0, 0);
5181 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5182 IEM_MC_ADVANCE_RIP();
5183 } IEM_MC_ELSE() {
5184 IEM_MC_REL_JMP_S32(i32Imm);
5185 } IEM_MC_ENDIF();
5186 IEM_MC_END();
5187 }
5188 return VINF_SUCCESS;
5189}
5190
5191
5192/** Opcode 0x0f 0x88. */
5193FNIEMOP_DEF(iemOp_js_Jv)
5194{
5195 IEMOP_MNEMONIC(js_Jv, "js Jv");
5196 IEMOP_HLP_MIN_386();
5197 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5198 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5199 {
5200 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5202
5203 IEM_MC_BEGIN(0, 0);
5204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5205 IEM_MC_REL_JMP_S16(i16Imm);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_ADVANCE_RIP();
5208 } IEM_MC_ENDIF();
5209 IEM_MC_END();
5210 }
5211 else
5212 {
5213 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5215
5216 IEM_MC_BEGIN(0, 0);
5217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5218 IEM_MC_REL_JMP_S32(i32Imm);
5219 } IEM_MC_ELSE() {
5220 IEM_MC_ADVANCE_RIP();
5221 } IEM_MC_ENDIF();
5222 IEM_MC_END();
5223 }
5224 return VINF_SUCCESS;
5225}
5226
5227
5228/** Opcode 0x0f 0x89. */
5229FNIEMOP_DEF(iemOp_jns_Jv)
5230{
5231 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5232 IEMOP_HLP_MIN_386();
5233 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5234 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5235 {
5236 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5238
5239 IEM_MC_BEGIN(0, 0);
5240 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5241 IEM_MC_ADVANCE_RIP();
5242 } IEM_MC_ELSE() {
5243 IEM_MC_REL_JMP_S16(i16Imm);
5244 } IEM_MC_ENDIF();
5245 IEM_MC_END();
5246 }
5247 else
5248 {
5249 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5251
5252 IEM_MC_BEGIN(0, 0);
5253 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5254 IEM_MC_ADVANCE_RIP();
5255 } IEM_MC_ELSE() {
5256 IEM_MC_REL_JMP_S32(i32Imm);
5257 } IEM_MC_ENDIF();
5258 IEM_MC_END();
5259 }
5260 return VINF_SUCCESS;
5261}
5262
5263
5264/** Opcode 0x0f 0x8a. */
5265FNIEMOP_DEF(iemOp_jp_Jv)
5266{
5267 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5268 IEMOP_HLP_MIN_386();
5269 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5270 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5271 {
5272 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274
5275 IEM_MC_BEGIN(0, 0);
5276 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5277 IEM_MC_REL_JMP_S16(i16Imm);
5278 } IEM_MC_ELSE() {
5279 IEM_MC_ADVANCE_RIP();
5280 } IEM_MC_ENDIF();
5281 IEM_MC_END();
5282 }
5283 else
5284 {
5285 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287
5288 IEM_MC_BEGIN(0, 0);
5289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5290 IEM_MC_REL_JMP_S32(i32Imm);
5291 } IEM_MC_ELSE() {
5292 IEM_MC_ADVANCE_RIP();
5293 } IEM_MC_ENDIF();
5294 IEM_MC_END();
5295 }
5296 return VINF_SUCCESS;
5297}
5298
5299
5300/** Opcode 0x0f 0x8b. */
5301FNIEMOP_DEF(iemOp_jnp_Jv)
5302{
5303 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5304 IEMOP_HLP_MIN_386();
5305 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5306 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5307 {
5308 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5310
5311 IEM_MC_BEGIN(0, 0);
5312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5313 IEM_MC_ADVANCE_RIP();
5314 } IEM_MC_ELSE() {
5315 IEM_MC_REL_JMP_S16(i16Imm);
5316 } IEM_MC_ENDIF();
5317 IEM_MC_END();
5318 }
5319 else
5320 {
5321 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5323
5324 IEM_MC_BEGIN(0, 0);
5325 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5326 IEM_MC_ADVANCE_RIP();
5327 } IEM_MC_ELSE() {
5328 IEM_MC_REL_JMP_S32(i32Imm);
5329 } IEM_MC_ENDIF();
5330 IEM_MC_END();
5331 }
5332 return VINF_SUCCESS;
5333}
5334
5335
5336/** Opcode 0x0f 0x8c. */
5337FNIEMOP_DEF(iemOp_jl_Jv)
5338{
5339 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5340 IEMOP_HLP_MIN_386();
5341 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5342 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5343 {
5344 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346
5347 IEM_MC_BEGIN(0, 0);
5348 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5349 IEM_MC_REL_JMP_S16(i16Imm);
5350 } IEM_MC_ELSE() {
5351 IEM_MC_ADVANCE_RIP();
5352 } IEM_MC_ENDIF();
5353 IEM_MC_END();
5354 }
5355 else
5356 {
5357 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5359
5360 IEM_MC_BEGIN(0, 0);
5361 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5362 IEM_MC_REL_JMP_S32(i32Imm);
5363 } IEM_MC_ELSE() {
5364 IEM_MC_ADVANCE_RIP();
5365 } IEM_MC_ENDIF();
5366 IEM_MC_END();
5367 }
5368 return VINF_SUCCESS;
5369}
5370
5371
5372/** Opcode 0x0f 0x8d. */
5373FNIEMOP_DEF(iemOp_jnl_Jv)
5374{
5375 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5376 IEMOP_HLP_MIN_386();
5377 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5378 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5379 {
5380 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5382
5383 IEM_MC_BEGIN(0, 0);
5384 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5385 IEM_MC_ADVANCE_RIP();
5386 } IEM_MC_ELSE() {
5387 IEM_MC_REL_JMP_S16(i16Imm);
5388 } IEM_MC_ENDIF();
5389 IEM_MC_END();
5390 }
5391 else
5392 {
5393 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5395
5396 IEM_MC_BEGIN(0, 0);
5397 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5398 IEM_MC_ADVANCE_RIP();
5399 } IEM_MC_ELSE() {
5400 IEM_MC_REL_JMP_S32(i32Imm);
5401 } IEM_MC_ENDIF();
5402 IEM_MC_END();
5403 }
5404 return VINF_SUCCESS;
5405}
5406
5407
5408/** Opcode 0x0f 0x8e. */
5409FNIEMOP_DEF(iemOp_jle_Jv)
5410{
5411 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5412 IEMOP_HLP_MIN_386();
5413 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5414 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5415 {
5416 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5418
5419 IEM_MC_BEGIN(0, 0);
5420 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5421 IEM_MC_REL_JMP_S16(i16Imm);
5422 } IEM_MC_ELSE() {
5423 IEM_MC_ADVANCE_RIP();
5424 } IEM_MC_ENDIF();
5425 IEM_MC_END();
5426 }
5427 else
5428 {
5429 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5431
5432 IEM_MC_BEGIN(0, 0);
5433 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5434 IEM_MC_REL_JMP_S32(i32Imm);
5435 } IEM_MC_ELSE() {
5436 IEM_MC_ADVANCE_RIP();
5437 } IEM_MC_ENDIF();
5438 IEM_MC_END();
5439 }
5440 return VINF_SUCCESS;
5441}
5442
5443
5444/** Opcode 0x0f 0x8f. */
5445FNIEMOP_DEF(iemOp_jnle_Jv)
5446{
5447 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5448 IEMOP_HLP_MIN_386();
5449 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5450 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5451 {
5452 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5454
5455 IEM_MC_BEGIN(0, 0);
5456 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5457 IEM_MC_ADVANCE_RIP();
5458 } IEM_MC_ELSE() {
5459 IEM_MC_REL_JMP_S16(i16Imm);
5460 } IEM_MC_ENDIF();
5461 IEM_MC_END();
5462 }
5463 else
5464 {
5465 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5467
5468 IEM_MC_BEGIN(0, 0);
5469 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5470 IEM_MC_ADVANCE_RIP();
5471 } IEM_MC_ELSE() {
5472 IEM_MC_REL_JMP_S32(i32Imm);
5473 } IEM_MC_ENDIF();
5474 IEM_MC_END();
5475 }
5476 return VINF_SUCCESS;
5477}
5478
5479
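/* Editorial note on the SETcc handlers (0x0F 0x90 onwards): they reuse the
 * Jcc conditions above but store a byte of 1 (condition met) or 0 into the
 * register or memory operand instead of branching; only the r/m part of the
 * ModR/M byte matters, cf. the @todo remarks about the 'reg' field. */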
5480/** Opcode 0x0f 0x90. */
5481FNIEMOP_DEF(iemOp_seto_Eb)
5482{
5483 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5484 IEMOP_HLP_MIN_386();
5485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5486
5487 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5488 * any way. AMD says it's "unused", whatever that means. We're
5489 * ignoring it for now. */
5490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5491 {
5492 /* register target */
5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5494 IEM_MC_BEGIN(0, 0);
5495 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5496 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5497 } IEM_MC_ELSE() {
5498 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5499 } IEM_MC_ENDIF();
5500 IEM_MC_ADVANCE_RIP();
5501 IEM_MC_END();
5502 }
5503 else
5504 {
5505 /* memory target */
5506 IEM_MC_BEGIN(0, 1);
5507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5511 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5512 } IEM_MC_ELSE() {
5513 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5514 } IEM_MC_ENDIF();
5515 IEM_MC_ADVANCE_RIP();
5516 IEM_MC_END();
5517 }
5518 return VINF_SUCCESS;
5519}
5520
5521
5522/** Opcode 0x0f 0x91. */
5523FNIEMOP_DEF(iemOp_setno_Eb)
5524{
5525 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5526 IEMOP_HLP_MIN_386();
5527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5528
5529 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5530 * any way. AMD says it's "unused", whatever that means. We're
5531 * ignoring for now. */
5532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5533 {
5534 /* register target */
5535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5536 IEM_MC_BEGIN(0, 0);
5537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5538 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5539 } IEM_MC_ELSE() {
5540 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5541 } IEM_MC_ENDIF();
5542 IEM_MC_ADVANCE_RIP();
5543 IEM_MC_END();
5544 }
5545 else
5546 {
5547 /* memory target */
5548 IEM_MC_BEGIN(0, 1);
5549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5553 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5554 } IEM_MC_ELSE() {
5555 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5556 } IEM_MC_ENDIF();
5557 IEM_MC_ADVANCE_RIP();
5558 IEM_MC_END();
5559 }
5560 return VINF_SUCCESS;
5561}
5562
5563
5564/** Opcode 0x0f 0x92. */
5565FNIEMOP_DEF(iemOp_setc_Eb)
5566{
5567 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5568 IEMOP_HLP_MIN_386();
5569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5570
5571 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5572 * any way. AMD says it's "unused", whatever that means. We're
5573 * ignoring for now. */
5574 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5575 {
5576 /* register target */
5577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5578 IEM_MC_BEGIN(0, 0);
5579 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5580 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5581 } IEM_MC_ELSE() {
5582 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5583 } IEM_MC_ENDIF();
5584 IEM_MC_ADVANCE_RIP();
5585 IEM_MC_END();
5586 }
5587 else
5588 {
5589 /* memory target */
5590 IEM_MC_BEGIN(0, 1);
5591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5595 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5596 } IEM_MC_ELSE() {
5597 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5598 } IEM_MC_ENDIF();
5599 IEM_MC_ADVANCE_RIP();
5600 IEM_MC_END();
5601 }
5602 return VINF_SUCCESS;
5603}
5604
5605
5606/** Opcode 0x0f 0x93. */
5607FNIEMOP_DEF(iemOp_setnc_Eb)
5608{
5609 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5610 IEMOP_HLP_MIN_386();
5611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5612
5613 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5614 * any way. AMD says it's "unused", whatever that means. We're
5615 * ignoring for now. */
5616 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5617 {
5618 /* register target */
5619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5620 IEM_MC_BEGIN(0, 0);
5621 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5622 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5623 } IEM_MC_ELSE() {
5624 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5625 } IEM_MC_ENDIF();
5626 IEM_MC_ADVANCE_RIP();
5627 IEM_MC_END();
5628 }
5629 else
5630 {
5631 /* memory target */
5632 IEM_MC_BEGIN(0, 1);
5633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5637 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5638 } IEM_MC_ELSE() {
5639 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5640 } IEM_MC_ENDIF();
5641 IEM_MC_ADVANCE_RIP();
5642 IEM_MC_END();
5643 }
5644 return VINF_SUCCESS;
5645}
5646
5647
5648/** Opcode 0x0f 0x94. */
5649FNIEMOP_DEF(iemOp_sete_Eb)
5650{
5651 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5652 IEMOP_HLP_MIN_386();
5653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5654
5655 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5656 * any way. AMD says it's "unused", whatever that means. We're
5657 * ignoring for now. */
5658 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5659 {
5660 /* register target */
5661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5662 IEM_MC_BEGIN(0, 0);
5663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5664 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5665 } IEM_MC_ELSE() {
5666 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5667 } IEM_MC_ENDIF();
5668 IEM_MC_ADVANCE_RIP();
5669 IEM_MC_END();
5670 }
5671 else
5672 {
5673 /* memory target */
5674 IEM_MC_BEGIN(0, 1);
5675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5679 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5680 } IEM_MC_ELSE() {
5681 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5682 } IEM_MC_ENDIF();
5683 IEM_MC_ADVANCE_RIP();
5684 IEM_MC_END();
5685 }
5686 return VINF_SUCCESS;
5687}
5688
5689
5690/** Opcode 0x0f 0x95. */
5691FNIEMOP_DEF(iemOp_setne_Eb)
5692{
5693 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5694 IEMOP_HLP_MIN_386();
5695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5696
5697 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5698 * any way. AMD says it's "unused", whatever that means. We're
5699 * ignoring for now. */
5700 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5701 {
5702 /* register target */
5703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5704 IEM_MC_BEGIN(0, 0);
5705 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5706 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5707 } IEM_MC_ELSE() {
5708 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5709 } IEM_MC_ENDIF();
5710 IEM_MC_ADVANCE_RIP();
5711 IEM_MC_END();
5712 }
5713 else
5714 {
5715 /* memory target */
5716 IEM_MC_BEGIN(0, 1);
5717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5722 } IEM_MC_ELSE() {
5723 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5724 } IEM_MC_ENDIF();
5725 IEM_MC_ADVANCE_RIP();
5726 IEM_MC_END();
5727 }
5728 return VINF_SUCCESS;
5729}
5730
5731
5732/** Opcode 0x0f 0x96. */
5733FNIEMOP_DEF(iemOp_setbe_Eb)
5734{
5735 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5736 IEMOP_HLP_MIN_386();
5737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5738
5739 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5740 * any way. AMD says it's "unused", whatever that means. We're
5741 * ignoring for now. */
5742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5743 {
5744 /* register target */
5745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5746 IEM_MC_BEGIN(0, 0);
5747 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5748 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5749 } IEM_MC_ELSE() {
5750 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5751 } IEM_MC_ENDIF();
5752 IEM_MC_ADVANCE_RIP();
5753 IEM_MC_END();
5754 }
5755 else
5756 {
5757 /* memory target */
5758 IEM_MC_BEGIN(0, 1);
5759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5762 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5763 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5764 } IEM_MC_ELSE() {
5765 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5766 } IEM_MC_ENDIF();
5767 IEM_MC_ADVANCE_RIP();
5768 IEM_MC_END();
5769 }
5770 return VINF_SUCCESS;
5771}
5772
5773
5774/** Opcode 0x0f 0x97. */
5775FNIEMOP_DEF(iemOp_setnbe_Eb)
5776{
5777 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5778 IEMOP_HLP_MIN_386();
5779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5780
5781 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5782 * any way. AMD says it's "unused", whatever that means. We're
5783 * ignoring for now. */
5784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5785 {
5786 /* register target */
5787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5788 IEM_MC_BEGIN(0, 0);
5789 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5790 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5791 } IEM_MC_ELSE() {
5792 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5793 } IEM_MC_ENDIF();
5794 IEM_MC_ADVANCE_RIP();
5795 IEM_MC_END();
5796 }
5797 else
5798 {
5799 /* memory target */
5800 IEM_MC_BEGIN(0, 1);
5801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5804 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5805 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5806 } IEM_MC_ELSE() {
5807 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5808 } IEM_MC_ENDIF();
5809 IEM_MC_ADVANCE_RIP();
5810 IEM_MC_END();
5811 }
5812 return VINF_SUCCESS;
5813}
5814
5815
5816/** Opcode 0x0f 0x98. */
5817FNIEMOP_DEF(iemOp_sets_Eb)
5818{
5819 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5820 IEMOP_HLP_MIN_386();
5821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5822
5823 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5824 * any way. AMD says it's "unused", whatever that means. We're
5825 * ignoring for now. */
5826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5827 {
5828 /* register target */
5829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5830 IEM_MC_BEGIN(0, 0);
5831 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5832 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5833 } IEM_MC_ELSE() {
5834 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5835 } IEM_MC_ENDIF();
5836 IEM_MC_ADVANCE_RIP();
5837 IEM_MC_END();
5838 }
5839 else
5840 {
5841 /* memory target */
5842 IEM_MC_BEGIN(0, 1);
5843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5846 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5847 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5848 } IEM_MC_ELSE() {
5849 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5850 } IEM_MC_ENDIF();
5851 IEM_MC_ADVANCE_RIP();
5852 IEM_MC_END();
5853 }
5854 return VINF_SUCCESS;
5855}
5856
5857
5858/** Opcode 0x0f 0x99. */
5859FNIEMOP_DEF(iemOp_setns_Eb)
5860{
5861 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5862 IEMOP_HLP_MIN_386();
5863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5864
5865 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5866 * any way. AMD says it's "unused", whatever that means. We're
5867 * ignoring for now. */
5868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5869 {
5870 /* register target */
5871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5872 IEM_MC_BEGIN(0, 0);
5873 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5874 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5875 } IEM_MC_ELSE() {
5876 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5877 } IEM_MC_ENDIF();
5878 IEM_MC_ADVANCE_RIP();
5879 IEM_MC_END();
5880 }
5881 else
5882 {
5883 /* memory target */
5884 IEM_MC_BEGIN(0, 1);
5885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5889 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5890 } IEM_MC_ELSE() {
5891 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5892 } IEM_MC_ENDIF();
5893 IEM_MC_ADVANCE_RIP();
5894 IEM_MC_END();
5895 }
5896 return VINF_SUCCESS;
5897}
5898
5899
5900/** Opcode 0x0f 0x9a. */
5901FNIEMOP_DEF(iemOp_setp_Eb)
5902{
5903 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5904 IEMOP_HLP_MIN_386();
5905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5906
5907 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5908 * any way. AMD says it's "unused", whatever that means. We're
5909 * ignoring for now. */
5910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5911 {
5912 /* register target */
5913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5914 IEM_MC_BEGIN(0, 0);
5915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5916 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5917 } IEM_MC_ELSE() {
5918 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5919 } IEM_MC_ENDIF();
5920 IEM_MC_ADVANCE_RIP();
5921 IEM_MC_END();
5922 }
5923 else
5924 {
5925 /* memory target */
5926 IEM_MC_BEGIN(0, 1);
5927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5931 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5932 } IEM_MC_ELSE() {
5933 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5934 } IEM_MC_ENDIF();
5935 IEM_MC_ADVANCE_RIP();
5936 IEM_MC_END();
5937 }
5938 return VINF_SUCCESS;
5939}
5940
5941
5942/** Opcode 0x0f 0x9b. */
5943FNIEMOP_DEF(iemOp_setnp_Eb)
5944{
5945 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5946 IEMOP_HLP_MIN_386();
5947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5948
5949 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5950 * any way. AMD says it's "unused", whatever that means. We're
5951 * ignoring for now. */
5952 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5953 {
5954 /* register target */
5955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5956 IEM_MC_BEGIN(0, 0);
5957 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5958 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5959 } IEM_MC_ELSE() {
5960 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5961 } IEM_MC_ENDIF();
5962 IEM_MC_ADVANCE_RIP();
5963 IEM_MC_END();
5964 }
5965 else
5966 {
5967 /* memory target */
5968 IEM_MC_BEGIN(0, 1);
5969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5972 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5973 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5974 } IEM_MC_ELSE() {
5975 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5976 } IEM_MC_ENDIF();
5977 IEM_MC_ADVANCE_RIP();
5978 IEM_MC_END();
5979 }
5980 return VINF_SUCCESS;
5981}
5982
5983
5984/** Opcode 0x0f 0x9c. */
5985FNIEMOP_DEF(iemOp_setl_Eb)
5986{
5987 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5988 IEMOP_HLP_MIN_386();
5989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5990
5991 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5992 * any way. AMD says it's "unused", whatever that means. We're
5993 * ignoring for now. */
5994 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5995 {
5996 /* register target */
5997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5998 IEM_MC_BEGIN(0, 0);
5999 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6000 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6001 } IEM_MC_ELSE() {
6002 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6003 } IEM_MC_ENDIF();
6004 IEM_MC_ADVANCE_RIP();
6005 IEM_MC_END();
6006 }
6007 else
6008 {
6009 /* memory target */
6010 IEM_MC_BEGIN(0, 1);
6011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6014 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6015 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6016 } IEM_MC_ELSE() {
6017 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6018 } IEM_MC_ENDIF();
6019 IEM_MC_ADVANCE_RIP();
6020 IEM_MC_END();
6021 }
6022 return VINF_SUCCESS;
6023}
6024
6025
6026/** Opcode 0x0f 0x9d. */
6027FNIEMOP_DEF(iemOp_setnl_Eb)
6028{
6029 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6030 IEMOP_HLP_MIN_386();
6031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6032
6033 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6034 * any way. AMD says it's "unused", whatever that means. We're
6035 * ignoring for now. */
6036 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6037 {
6038 /* register target */
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_BEGIN(0, 0);
6041 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6042 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6043 } IEM_MC_ELSE() {
6044 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6045 } IEM_MC_ENDIF();
6046 IEM_MC_ADVANCE_RIP();
6047 IEM_MC_END();
6048 }
6049 else
6050 {
6051 /* memory target */
6052 IEM_MC_BEGIN(0, 1);
6053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6056 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6057 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6058 } IEM_MC_ELSE() {
6059 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6060 } IEM_MC_ENDIF();
6061 IEM_MC_ADVANCE_RIP();
6062 IEM_MC_END();
6063 }
6064 return VINF_SUCCESS;
6065}
6066
6067
6068/** Opcode 0x0f 0x9e. */
6069FNIEMOP_DEF(iemOp_setle_Eb)
6070{
6071 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6072 IEMOP_HLP_MIN_386();
6073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6074
6075 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6076 * any way. AMD says it's "unused", whatever that means. We're
6077 * ignoring for now. */
6078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6079 {
6080 /* register target */
6081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6082 IEM_MC_BEGIN(0, 0);
6083 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6084 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6085 } IEM_MC_ELSE() {
6086 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6087 } IEM_MC_ENDIF();
6088 IEM_MC_ADVANCE_RIP();
6089 IEM_MC_END();
6090 }
6091 else
6092 {
6093 /* memory target */
6094 IEM_MC_BEGIN(0, 1);
6095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6098 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6099 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6100 } IEM_MC_ELSE() {
6101 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6102 } IEM_MC_ENDIF();
6103 IEM_MC_ADVANCE_RIP();
6104 IEM_MC_END();
6105 }
6106 return VINF_SUCCESS;
6107}
6108
6109
6110/** Opcode 0x0f 0x9f. */
6111FNIEMOP_DEF(iemOp_setnle_Eb)
6112{
6113 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6114 IEMOP_HLP_MIN_386();
6115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6116
6117 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6118 * any way. AMD says it's "unused", whatever that means. We're
6119 * ignoring for now. */
6120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6121 {
6122 /* register target */
6123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6124 IEM_MC_BEGIN(0, 0);
6125 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6126 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6127 } IEM_MC_ELSE() {
6128 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6129 } IEM_MC_ENDIF();
6130 IEM_MC_ADVANCE_RIP();
6131 IEM_MC_END();
6132 }
6133 else
6134 {
6135 /* memory target */
6136 IEM_MC_BEGIN(0, 1);
6137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6140 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6141 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6142 } IEM_MC_ELSE() {
6143 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6144 } IEM_MC_ENDIF();
6145 IEM_MC_ADVANCE_RIP();
6146 IEM_MC_END();
6147 }
6148 return VINF_SUCCESS;
6149}
6150
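/*
 * Reader aid (added comment): SETcc 0x90..0x9f above test the same EFLAGS
 * predicates as Jcc 0x80..0x8f, but store 1 or 0 into the byte operand
 * instead of branching:
 *
 *   90 OF=1     91 OF=0      92 CF=1        93 CF=0     94 ZF=1   95 ZF=0
 *   96 CF|ZF    97 !(CF|ZF)  98 SF=1        99 SF=0     9a PF=1   9b PF=0
 *   9c SF!=OF   9d SF==OF    9e ZF|(SF!=OF)             9f !(ZF|(SF!=OF))
 */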
6151
6152/**
6153 * Common 'push segment-register' helper.
6154 */
6155FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6156{
6157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6158 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS/GS pushes are valid in 64-bit mode. */
6159 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6160
6161 switch (pVCpu->iem.s.enmEffOpSize)
6162 {
6163 case IEMMODE_16BIT:
6164 IEM_MC_BEGIN(0, 1);
6165 IEM_MC_LOCAL(uint16_t, u16Value);
6166 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6167 IEM_MC_PUSH_U16(u16Value);
6168 IEM_MC_ADVANCE_RIP();
6169 IEM_MC_END();
6170 break;
6171
6172 case IEMMODE_32BIT:
6173 IEM_MC_BEGIN(0, 1);
6174 IEM_MC_LOCAL(uint32_t, u32Value);
6175 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6176 IEM_MC_PUSH_U32_SREG(u32Value);
6177 IEM_MC_ADVANCE_RIP();
6178 IEM_MC_END();
6179 break;
6180
6181 case IEMMODE_64BIT:
6182 IEM_MC_BEGIN(0, 1);
6183 IEM_MC_LOCAL(uint64_t, u64Value);
6184 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6185 IEM_MC_PUSH_U64(u64Value);
6186 IEM_MC_ADVANCE_RIP();
6187 IEM_MC_END();
6188 break;
6189 }
6190
6191 return VINF_SUCCESS;
6192}
6193
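/*
 * Added note: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than
 * plain IEM_MC_PUSH_U32.  On real CPUs a 32-bit push of a segment register
 * may write only the low 16 bits of the stack slot, leaving the upper word
 * untouched; the dedicated MC lets the implementation reproduce that quirk.
 * (Assumption inferred from the MC name; see its definition for the
 * authoritative behaviour.)
 */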
6194
6195/** Opcode 0x0f 0xa0. */
6196FNIEMOP_DEF(iemOp_push_fs)
6197{
6198 IEMOP_MNEMONIC(push_fs, "push fs");
6199 IEMOP_HLP_MIN_386();
6200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6201 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6202}
6203
6204
6205/** Opcode 0x0f 0xa1. */
6206FNIEMOP_DEF(iemOp_pop_fs)
6207{
6208 IEMOP_MNEMONIC(pop_fs, "pop fs");
6209 IEMOP_HLP_MIN_386();
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6212}
6213
6214
6215/** Opcode 0x0f 0xa2. */
6216FNIEMOP_DEF(iemOp_cpuid)
6217{
6218 IEMOP_MNEMONIC(cpuid, "cpuid");
6219 IEMOP_HLP_MIN_486(); /* not all 486es. */
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6221 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6222}
6223
6224
6225/**
6226 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6227 * iemOp_bts_Ev_Gv.
6228 */
6229FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6230{
6231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6232 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6233
6234 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6235 {
6236 /* register destination. */
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 switch (pVCpu->iem.s.enmEffOpSize)
6239 {
6240 case IEMMODE_16BIT:
6241 IEM_MC_BEGIN(3, 0);
6242 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6243 IEM_MC_ARG(uint16_t, u16Src, 1);
6244 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6245
6246 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6247 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6248 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6249 IEM_MC_REF_EFLAGS(pEFlags);
6250 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6251
6252 IEM_MC_ADVANCE_RIP();
6253 IEM_MC_END();
6254 return VINF_SUCCESS;
6255
6256 case IEMMODE_32BIT:
6257 IEM_MC_BEGIN(3, 0);
6258 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6259 IEM_MC_ARG(uint32_t, u32Src, 1);
6260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6261
6262 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6263 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6264 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6265 IEM_MC_REF_EFLAGS(pEFlags);
6266 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6267
6268 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(3, 0);
6275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6276 IEM_MC_ARG(uint64_t, u64Src, 1);
6277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6278
6279 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6280 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6281 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6282 IEM_MC_REF_EFLAGS(pEFlags);
6283 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6284
6285 IEM_MC_ADVANCE_RIP();
6286 IEM_MC_END();
6287 return VINF_SUCCESS;
6288
6289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6290 }
6291 }
6292 else
6293 {
6294 /* memory destination. */
6295
6296 uint32_t fAccess;
6297 if (pImpl->pfnLockedU16)
6298 fAccess = IEM_ACCESS_DATA_RW;
6299 else /* BT */
6300 fAccess = IEM_ACCESS_DATA_R;
6301
6302 /** @todo test negative bit offsets! */
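 /* Added note: for the memory forms the register operand is a *signed* bit
    offset.  The SAR/SHL/AND sequences below split it into a byte
    displacement of (offset >> 4|5|6) << 1|2|3 (arithmetic shifts, so the
    sign survives) and a bit index of offset & 15|31|63.  E.g. a 16-bit
    offset of -1 gives displacement -2 and bit 15, i.e. the most significant
    bit of the word just below the base address. */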
6303 switch (pVCpu->iem.s.enmEffOpSize)
6304 {
6305 case IEMMODE_16BIT:
6306 IEM_MC_BEGIN(3, 2);
6307 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6308 IEM_MC_ARG(uint16_t, u16Src, 1);
6309 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6311 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6312
6313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6314 if (pImpl->pfnLockedU16)
6315 IEMOP_HLP_DONE_DECODING();
6316 else
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6318 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6319 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6320 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6321 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6322 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6323 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6324 IEM_MC_FETCH_EFLAGS(EFlags);
6325
6326 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6327 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6329 else
6330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6332
6333 IEM_MC_COMMIT_EFLAGS(EFlags);
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 case IEMMODE_32BIT:
6339 IEM_MC_BEGIN(3, 2);
6340 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6341 IEM_MC_ARG(uint32_t, u32Src, 1);
6342 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6344 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6345
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6347 if (pImpl->pfnLockedU16)
6348 IEMOP_HLP_DONE_DECODING();
6349 else
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6352 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6353 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6354 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6355 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6356 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6357 IEM_MC_FETCH_EFLAGS(EFlags);
6358
6359 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6360 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6361 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6362 else
6363 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6364 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6365
6366 IEM_MC_COMMIT_EFLAGS(EFlags);
6367 IEM_MC_ADVANCE_RIP();
6368 IEM_MC_END();
6369 return VINF_SUCCESS;
6370
6371 case IEMMODE_64BIT:
6372 IEM_MC_BEGIN(3, 2);
6373 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6374 IEM_MC_ARG(uint64_t, u64Src, 1);
6375 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6377 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6378
6379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6380 if (pImpl->pfnLockedU16)
6381 IEMOP_HLP_DONE_DECODING();
6382 else
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6385 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6386 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6387 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6388 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6389 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6390 IEM_MC_FETCH_EFLAGS(EFlags);
6391
6392 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6394 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6395 else
6396 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6397 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6398
6399 IEM_MC_COMMIT_EFLAGS(EFlags);
6400 IEM_MC_ADVANCE_RIP();
6401 IEM_MC_END();
6402 return VINF_SUCCESS;
6403
6404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6405 }
6406 }
6407}
6408
6409
6410/** Opcode 0x0f 0xa3. */
6411FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6412{
6413 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6414 IEMOP_HLP_MIN_386();
6415 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6416}
6417
6418
6419/**
6420 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6421 */
6422FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6423{
6424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6425 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6426
6427 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6428 {
6429 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6431
6432 switch (pVCpu->iem.s.enmEffOpSize)
6433 {
6434 case IEMMODE_16BIT:
6435 IEM_MC_BEGIN(4, 0);
6436 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6437 IEM_MC_ARG(uint16_t, u16Src, 1);
6438 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6439 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6440
6441 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6442 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6443 IEM_MC_REF_EFLAGS(pEFlags);
6444 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6445
6446 IEM_MC_ADVANCE_RIP();
6447 IEM_MC_END();
6448 return VINF_SUCCESS;
6449
6450 case IEMMODE_32BIT:
6451 IEM_MC_BEGIN(4, 0);
6452 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6453 IEM_MC_ARG(uint32_t, u32Src, 1);
6454 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6455 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6456
6457 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6458 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6459 IEM_MC_REF_EFLAGS(pEFlags);
6460 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6461
6462 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 return VINF_SUCCESS;
6466
6467 case IEMMODE_64BIT:
6468 IEM_MC_BEGIN(4, 0);
6469 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6470 IEM_MC_ARG(uint64_t, u64Src, 1);
6471 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6472 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6473
6474 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6475 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6476 IEM_MC_REF_EFLAGS(pEFlags);
6477 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6478
6479 IEM_MC_ADVANCE_RIP();
6480 IEM_MC_END();
6481 return VINF_SUCCESS;
6482
6483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6484 }
6485 }
6486 else
6487 {
6488 switch (pVCpu->iem.s.enmEffOpSize)
6489 {
6490 case IEMMODE_16BIT:
6491 IEM_MC_BEGIN(4, 2);
6492 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6493 IEM_MC_ARG(uint16_t, u16Src, 1);
6494 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6495 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6497
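 /* Added note: the cbImm=1 argument below tells the effective-address
    calculation that one immediate byte still follows the ModR/M bytes.
    In 64-bit mode RIP-relative operands are based on the address of the
    *next* instruction, so the pending immediate must be accounted for.
    The 32- and 64-bit cases below do the same. */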
6498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6499 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6500 IEM_MC_ASSIGN(cShiftArg, cShift);
6501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6502 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6503 IEM_MC_FETCH_EFLAGS(EFlags);
6504 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6505 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6506
6507 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6508 IEM_MC_COMMIT_EFLAGS(EFlags);
6509 IEM_MC_ADVANCE_RIP();
6510 IEM_MC_END();
6511 return VINF_SUCCESS;
6512
6513 case IEMMODE_32BIT:
6514 IEM_MC_BEGIN(4, 2);
6515 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6516 IEM_MC_ARG(uint32_t, u32Src, 1);
6517 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6518 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6520
6521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6522 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6523 IEM_MC_ASSIGN(cShiftArg, cShift);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6526 IEM_MC_FETCH_EFLAGS(EFlags);
6527 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6528 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6529
6530 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6531 IEM_MC_COMMIT_EFLAGS(EFlags);
6532 IEM_MC_ADVANCE_RIP();
6533 IEM_MC_END();
6534 return VINF_SUCCESS;
6535
6536 case IEMMODE_64BIT:
6537 IEM_MC_BEGIN(4, 2);
6538 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6539 IEM_MC_ARG(uint64_t, u64Src, 1);
6540 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6541 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6543
6544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6545 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6546 IEM_MC_ASSIGN(cShiftArg, cShift);
6547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6548 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6549 IEM_MC_FETCH_EFLAGS(EFlags);
6550 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6551 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6552
6553 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6554 IEM_MC_COMMIT_EFLAGS(EFlags);
6555 IEM_MC_ADVANCE_RIP();
6556 IEM_MC_END();
6557 return VINF_SUCCESS;
6558
6559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6560 }
6561 }
6562}
6563
6564
6565/**
6566 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6567 */
6568FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6569{
6570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6572
6573 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6574 {
6575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6576
6577 switch (pVCpu->iem.s.enmEffOpSize)
6578 {
6579 case IEMMODE_16BIT:
6580 IEM_MC_BEGIN(4, 0);
6581 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6582 IEM_MC_ARG(uint16_t, u16Src, 1);
6583 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6584 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6585
6586 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6587 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6588 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6589 IEM_MC_REF_EFLAGS(pEFlags);
6590 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6591
6592 IEM_MC_ADVANCE_RIP();
6593 IEM_MC_END();
6594 return VINF_SUCCESS;
6595
6596 case IEMMODE_32BIT:
6597 IEM_MC_BEGIN(4, 0);
6598 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6599 IEM_MC_ARG(uint32_t, u32Src, 1);
6600 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6601 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6602
6603 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6604 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6605 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6606 IEM_MC_REF_EFLAGS(pEFlags);
6607 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6608
6609 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6610 IEM_MC_ADVANCE_RIP();
6611 IEM_MC_END();
6612 return VINF_SUCCESS;
6613
6614 case IEMMODE_64BIT:
6615 IEM_MC_BEGIN(4, 0);
6616 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6617 IEM_MC_ARG(uint64_t, u64Src, 1);
6618 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6619 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6620
6621 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6622 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6623 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6624 IEM_MC_REF_EFLAGS(pEFlags);
6625 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6626
6627 IEM_MC_ADVANCE_RIP();
6628 IEM_MC_END();
6629 return VINF_SUCCESS;
6630
6631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6632 }
6633 }
6634 else
6635 {
6636 switch (pVCpu->iem.s.enmEffOpSize)
6637 {
6638 case IEMMODE_16BIT:
6639 IEM_MC_BEGIN(4, 2);
6640 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6641 IEM_MC_ARG(uint16_t, u16Src, 1);
6642 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6643 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6645
6646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6648 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6649 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6650 IEM_MC_FETCH_EFLAGS(EFlags);
6651 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6652 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6653
6654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6655 IEM_MC_COMMIT_EFLAGS(EFlags);
6656 IEM_MC_ADVANCE_RIP();
6657 IEM_MC_END();
6658 return VINF_SUCCESS;
6659
6660 case IEMMODE_32BIT:
6661 IEM_MC_BEGIN(4, 2);
6662 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6663 IEM_MC_ARG(uint32_t, u32Src, 1);
6664 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6667
6668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6670 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6671 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6672 IEM_MC_FETCH_EFLAGS(EFlags);
6673 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6674 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6675
6676 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6677 IEM_MC_COMMIT_EFLAGS(EFlags);
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 case IEMMODE_64BIT:
6683 IEM_MC_BEGIN(4, 2);
6684 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6685 IEM_MC_ARG(uint64_t, u64Src, 1);
6686 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6687 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6689
6690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6692 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6693 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6694 IEM_MC_FETCH_EFLAGS(EFlags);
6695 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6696 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6697
6698 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6699 IEM_MC_COMMIT_EFLAGS(EFlags);
6700 IEM_MC_ADVANCE_RIP();
6701 IEM_MC_END();
6702 return VINF_SUCCESS;
6703
6704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6705 }
6706 }
6707}
6708
6709
6710
6711/** Opcode 0x0f 0xa4. */
6712FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6713{
6714 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6715 IEMOP_HLP_MIN_386();
6716 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6717}
6718
6719
6720/** Opcode 0x0f 0xa5. */
6721FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6722{
6723 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6724 IEMOP_HLP_MIN_386();
6725 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6726}
6727
6728
6729/** Opcode 0x0f 0xa8. */
6730FNIEMOP_DEF(iemOp_push_gs)
6731{
6732 IEMOP_MNEMONIC(push_gs, "push gs");
6733 IEMOP_HLP_MIN_386();
6734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6735 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6736}
6737
6738
6739/** Opcode 0x0f 0xa9. */
6740FNIEMOP_DEF(iemOp_pop_gs)
6741{
6742 IEMOP_MNEMONIC(pop_gs, "pop gs");
6743 IEMOP_HLP_MIN_386();
6744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6745 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6746}
6747
6748
6749/** Opcode 0x0f 0xaa. */
6750FNIEMOP_DEF(iemOp_rsm)
6751{
6752 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6753 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6755 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6756}
6757
6758
6759
6760/** Opcode 0x0f 0xab. */
6761FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6762{
6763 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6764 IEMOP_HLP_MIN_386();
6765 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6766}
6767
6768
6769/** Opcode 0x0f 0xac. */
6770FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6771{
6772 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6773 IEMOP_HLP_MIN_386();
6774 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6775}
6776
6777
6778/** Opcode 0x0f 0xad. */
6779FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6780{
6781 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6782 IEMOP_HLP_MIN_386();
6783 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6784}
6785
6786
6787/** Opcode 0x0f 0xae mem/0. */
6788FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6789{
6790 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6791 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6792 return IEMOP_RAISE_INVALID_OPCODE();
6793
6794 IEM_MC_BEGIN(3, 1);
6795 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6796 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6797 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6800 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6801 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6802 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6803 IEM_MC_END();
6804 return VINF_SUCCESS;
6805}
6806
6807
6808/** Opcode 0x0f 0xae mem/1. */
6809FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6810{
6811 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6812 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6813 return IEMOP_RAISE_INVALID_OPCODE();
6814
6815 IEM_MC_BEGIN(3, 1);
6816 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6817 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6818 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6821 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6822 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6823 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6824 IEM_MC_END();
6825 return VINF_SUCCESS;
6826}
6827
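/*
 * Added note: FXSAVE/FXRSTOR above operate on a 512-byte area that must be
 * 16-byte aligned; the alignment and access checks are expected to live in
 * the deferred workers (iemCImpl_fxsave/iemCImpl_fxrstor) rather than in
 * the decoder functions (assumption; the alignment requirement itself is
 * architectural).
 */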
6828
6829/**
6830 * @opmaps grp15
6831 * @opcode !11/2
6832 * @oppfx none
6833 * @opcpuid sse
6834 * @opgroup og_sse_mxcsrsm
6835 * @opxcpttype 5
6836 * @optest op1=0 -> mxcsr=0
6837 * @optest op1=0x2083 -> mxcsr=0x2083
6838 * @optest op1=0xfffffffe -> value.xcpt=0xd
6839 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6840 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6841 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6842 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6843 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6844 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6845 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6846 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6847 */
6848FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6849{
6850 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6851 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6852 return IEMOP_RAISE_INVALID_OPCODE();
6853
6854 IEM_MC_BEGIN(2, 0);
6855 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6856 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6859 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* LDMXCSR modifies MXCSR. */
6860 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6861 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6862 IEM_MC_END();
6863 return VINF_SUCCESS;
6864}
6865
6866
6867/**
6868 * @opmaps grp15
6869 * @opcode !11/3
6870 * @oppfx none
6871 * @opcpuid sse
6872 * @opgroup og_sse_mxcsrsm
6873 * @opxcpttype 5
6874 * @optest mxcsr=0 -> op1=0
6875 * @optest mxcsr=0x2083 -> op1=0x2083
6876 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6877 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6878 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6879 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6880 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6881 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6882 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6883 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6884 */
6885FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6886{
6887 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6888 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6889 return IEMOP_RAISE_INVALID_OPCODE();
6890
6891 IEM_MC_BEGIN(2, 0);
6892 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6893 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6896 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6897 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6898 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6899 IEM_MC_END();
6900 return VINF_SUCCESS;
6901}
6902
6903
6904/**
6905 * @opmaps grp15
6906 * @opcode !11/4
6907 * @oppfx none
6908 * @opcpuid xsave
6909 * @opgroup og_system
6910 * @opxcpttype none
6911 */
6912FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6913{
6914 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6915 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6916 return IEMOP_RAISE_INVALID_OPCODE();
6917
6918 IEM_MC_BEGIN(3, 0);
6919 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6920 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6921 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6924 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6925 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6926 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6927 IEM_MC_END();
6928 return VINF_SUCCESS;
6929}
6930
6931
6932/**
6933 * @opmaps grp15
6934 * @opcode !11/5
6935 * @oppfx none
6936 * @opcpuid xsave
6937 * @opgroup og_system
6938 * @opxcpttype none
6939 */
6940FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6941{
6942 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6943 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6944 return IEMOP_RAISE_INVALID_OPCODE();
6945
6946 IEM_MC_BEGIN(3, 0);
6947 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6948 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6949 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6952 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* XRSTOR modifies the state, like FXRSTOR above. */
6953 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6954 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6955 IEM_MC_END();
6956 return VINF_SUCCESS;
6957}
6958
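/*
 * Added note: XSAVE/XRSTOR implicitly consume the EDX:EAX requested-feature
 * mask (ANDed with XCR0); the deferred workers presumably read it from the
 * guest context, which is why only the segment, pointer and operand size
 * are passed explicitly above.
 */
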
6959/** Opcode 0x0f 0xae mem/6. */
6960FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6961
6962/**
6963 * @opmaps grp15
6964 * @opcode !11/7
6965 * @oppfx none
6966 * @opcpuid clfsh
6967 * @opgroup og_cachectl
6968 * @optest op1=1 ->
6969 */
6970FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6971{
6972 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6973 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6974 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6975
6976 IEM_MC_BEGIN(2, 0);
6977 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6978 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6981 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6982 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6983 IEM_MC_END();
6984 return VINF_SUCCESS;
6985}
6986
6987/**
6988 * @opmaps grp15
6989 * @opcode !11/7
6990 * @oppfx 0x66
6991 * @opcpuid clflushopt
6992 * @opgroup og_cachectl
6993 * @optest op1=1 ->
6994 */
6995FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6996{
6997 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6998 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6999 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7000
7001 IEM_MC_BEGIN(2, 0);
7002 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7003 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7006 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7007 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7008 IEM_MC_END();
7009 return VINF_SUCCESS;
7010}
7011
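/*
 * Added note: CLFLUSH and CLFLUSHOPT share iemCImpl_clflush_clflushopt and
 * are distinguished purely by the 066h prefix at decode time (see the
 * memory jump table below); architecturally they differ only in ordering
 * strength, which the shared worker apparently need not model.
 */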
7012
7013/** Opcode 0x0f 0xae 11b/5. */
7014FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7015{
7016 RT_NOREF_PV(bRm);
7017 IEMOP_MNEMONIC(lfence, "lfence");
7018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7019 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7020 return IEMOP_RAISE_INVALID_OPCODE();
7021
7022 IEM_MC_BEGIN(0, 0);
7023 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7024 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7025 else
7026 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7027 IEM_MC_ADVANCE_RIP();
7028 IEM_MC_END();
7029 return VINF_SUCCESS;
7030}
7031
7032
7033/** Opcode 0x0f 0xae 11b/6. */
7034FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7035{
7036 RT_NOREF_PV(bRm);
7037 IEMOP_MNEMONIC(mfence, "mfence");
7038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7039 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7040 return IEMOP_RAISE_INVALID_OPCODE();
7041
7042 IEM_MC_BEGIN(0, 0);
7043 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7044 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7045 else
7046 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7047 IEM_MC_ADVANCE_RIP();
7048 IEM_MC_END();
7049 return VINF_SUCCESS;
7050}
7051
7052
7053/** Opcode 0x0f 0xae 11b/7. */
7054FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7055{
7056 RT_NOREF_PV(bRm);
7057 IEMOP_MNEMONIC(sfence, "sfence");
7058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7059 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7060 return IEMOP_RAISE_INVALID_OPCODE();
7061
7062 IEM_MC_BEGIN(0, 0);
7063 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7064 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7065 else
7066 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7067 IEM_MC_ADVANCE_RIP();
7068 IEM_MC_END();
7069 return VINF_SUCCESS;
7070}
7071
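/*
 * Added note on the three fences above: when the *host* CPU lacks SSE2 the
 * native LFENCE/MFENCE/SFENCE helpers cannot be executed, so
 * iemAImpl_alt_mem_fence supplies an equivalent barrier (presumably a
 * locked read-modify-write; see its implementation for details).
 */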
7072
7073/** Opcode 0xf3 0x0f 0xae 11b/0. */
7074FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7075{
7076 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7079 {
7080 IEM_MC_BEGIN(1, 0);
7081 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7082 IEM_MC_ARG(uint64_t, u64Dst, 0);
7083 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7084 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7085 IEM_MC_ADVANCE_RIP();
7086 IEM_MC_END();
7087 }
7088 else
7089 {
7090 IEM_MC_BEGIN(1, 0);
7091 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7092 IEM_MC_ARG(uint32_t, u32Dst, 0);
7093 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7094 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7095 IEM_MC_ADVANCE_RIP();
7096 IEM_MC_END();
7097 }
7098 return VINF_SUCCESS;
7099}
7100
7101
7102/** Opcode 0xf3 0x0f 0xae 11b/1. */
7103FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7104{
7105 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7108 {
7109 IEM_MC_BEGIN(1, 0);
7110 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7111 IEM_MC_ARG(uint64_t, u64Dst, 0);
7112 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7113 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7114 IEM_MC_ADVANCE_RIP();
7115 IEM_MC_END();
7116 }
7117 else
7118 {
7119 IEM_MC_BEGIN(1, 0);
7120 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7121 IEM_MC_ARG(uint32_t, u32Dst, 0);
7122 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7123 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7124 IEM_MC_ADVANCE_RIP();
7125 IEM_MC_END();
7126 }
7127 return VINF_SUCCESS;
7128}
7129
7130
7131/** Opcode 0xf3 0x0f 0xae 11b/2. */
7132FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7133{
7134 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7136 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7137 {
7138 IEM_MC_BEGIN(1, 0);
7139 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7140 IEM_MC_ARG(uint64_t, u64Dst, 0);
7141 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7142 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7143 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7144 IEM_MC_ADVANCE_RIP();
7145 IEM_MC_END();
7146 }
7147 else
7148 {
7149 IEM_MC_BEGIN(1, 0);
7150 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7151 IEM_MC_ARG(uint32_t, u32Dst, 0);
7152 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7153 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7154 IEM_MC_ADVANCE_RIP();
7155 IEM_MC_END();
7156 }
7157 return VINF_SUCCESS;
7158}
7159
7160
7161/** Opcode 0xf3 0x0f 0xae 11b/3. */
7162FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7163{
7164 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7166 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7167 {
7168 IEM_MC_BEGIN(1, 0);
7169 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7170 IEM_MC_ARG(uint64_t, u64Dst, 0);
7171 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7172 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7173 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7174 IEM_MC_ADVANCE_RIP();
7175 IEM_MC_END();
7176 }
7177 else
7178 {
7179 IEM_MC_BEGIN(1, 0);
7180 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7181 IEM_MC_ARG(uint32_t, u32Dst, 0);
7182 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7183 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7184 IEM_MC_ADVANCE_RIP();
7185 IEM_MC_END();
7186 }
7187 return VINF_SUCCESS;
7188}
7189
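/*
 * Added note on WRFSBASE/WRGSBASE: only the 64-bit forms need
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0, since loading a non-canonical
 * base must raise #GP(0); the 32-bit forms zero-extend into the 64-bit base
 * and are therefore always canonical.  All four RD/WR*BASE forms exist only
 * in 64-bit mode with CR4.FSGSBASE set, which
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is assumed to enforce.
 */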
7190
7191/**
7192 * Group 15 jump table for register variant.
7193 */
7194IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7195{ /* pfx: none, 066h, 0f3h, 0f2h */
7196 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7197 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7198 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7199 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7200 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7201 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7202 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7203 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7204};
7205AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7206
7207
7208/**
7209 * Group 15 jump table for memory variant.
7210 */
7211IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7212{ /* pfx: none, 066h, 0f3h, 0f2h */
7213 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7214 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7215 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7216 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7217 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7218 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7219 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7220 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7221};
7222AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7223
7224
7225/** Opcode 0x0f 0xae. */
7226FNIEMOP_DEF(iemOp_Grp15)
7227{
7228 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
7229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7231 /* register, register */
7232 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7233 + pVCpu->iem.s.idxPrefix], bRm);
7234 /* memory, register */
7235 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7236 + pVCpu->iem.s.idxPrefix], bRm);
7237}
7238
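/* Illustrative only: how the Group 15 jump tables above are indexed. Each of
   the eight /r rows holds four entries, one per mandatory prefix column (none,
   066h, 0f3h, 0f2h), so the dispatch index is row*4+column. The helper name is
   made up for illustration. */
#if 0
static unsigned iemExampleGrp15Index(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}
#endif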
7239
7240/** Opcode 0x0f 0xaf. */
7241FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7242{
7243 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7244 IEMOP_HLP_MIN_386();
7245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7246 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7247}
7248
7249
7250/** Opcode 0x0f 0xb0. */
7251FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7252{
7253 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7254 IEMOP_HLP_MIN_486();
7255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7256
7257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7258 {
7259 IEMOP_HLP_DONE_DECODING();
7260 IEM_MC_BEGIN(4, 0);
7261 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7262 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7263 IEM_MC_ARG(uint8_t, u8Src, 2);
7264 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7265
7266 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7267 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7268 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7269 IEM_MC_REF_EFLAGS(pEFlags);
7270 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7271 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7272 else
7273 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7274
7275 IEM_MC_ADVANCE_RIP();
7276 IEM_MC_END();
7277 }
7278 else
7279 {
7280 IEM_MC_BEGIN(4, 3);
7281 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7282 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7283 IEM_MC_ARG(uint8_t, u8Src, 2);
7284 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7286 IEM_MC_LOCAL(uint8_t, u8Al);
7287
7288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7289 IEMOP_HLP_DONE_DECODING();
7290 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7291 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7292 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7293 IEM_MC_FETCH_EFLAGS(EFlags);
7294 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7295 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7296 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7297 else
7298 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7299
7300 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7301 IEM_MC_COMMIT_EFLAGS(EFlags);
7302 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7303 IEM_MC_ADVANCE_RIP();
7304 IEM_MC_END();
7305 }
7306 return VINF_SUCCESS;
7307}
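
/* A simplified sketch of the CMPXCHG r/m8,r8 semantics implemented by the
   iemAImpl_cmpxchg_u8 workers called above: only ZF is shown here, whereas the
   real workers also update CF/OF/SF/AF/PF from the implicit CMP. The helper
   name is made up for illustration. */
#if 0
static void iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *pfEFlags |= X86_EFL_ZF;    /* equal: ZF=1 and the destination gets the source */
        *puDst     = uSrc;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;   /* not equal: ZF=0 and AL gets the destination */
        *puAl      = *puDst;
    }
}
#endif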
7308
7309/** Opcode 0x0f 0xb1. */
7310FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7311{
7312 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7313 IEMOP_HLP_MIN_486();
7314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7315
7316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7317 {
7318 IEMOP_HLP_DONE_DECODING();
7319 switch (pVCpu->iem.s.enmEffOpSize)
7320 {
7321 case IEMMODE_16BIT:
7322 IEM_MC_BEGIN(4, 0);
7323 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7324 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7325 IEM_MC_ARG(uint16_t, u16Src, 2);
7326 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7327
7328 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7329 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7330 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7331 IEM_MC_REF_EFLAGS(pEFlags);
7332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7333 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7334 else
7335 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7336
7337 IEM_MC_ADVANCE_RIP();
7338 IEM_MC_END();
7339 return VINF_SUCCESS;
7340
7341 case IEMMODE_32BIT:
7342 IEM_MC_BEGIN(4, 0);
7343 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7344 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7345 IEM_MC_ARG(uint32_t, u32Src, 2);
7346 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7347
7348 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7349 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7350 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7351 IEM_MC_REF_EFLAGS(pEFlags);
7352 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7353 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7354 else
7355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7356
7357 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7358 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7359 IEM_MC_ADVANCE_RIP();
7360 IEM_MC_END();
7361 return VINF_SUCCESS;
7362
7363 case IEMMODE_64BIT:
7364 IEM_MC_BEGIN(4, 0);
7365 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7366 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7367#ifdef RT_ARCH_X86
7368 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7369#else
7370 IEM_MC_ARG(uint64_t, u64Src, 2);
7371#endif
7372 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7373
7374 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7375 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7376 IEM_MC_REF_EFLAGS(pEFlags);
7377#ifdef RT_ARCH_X86
7378 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7379 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7380 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7381 else
7382 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7383#else
7384 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7386 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7387 else
7388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7389#endif
7390
7391 IEM_MC_ADVANCE_RIP();
7392 IEM_MC_END();
7393 return VINF_SUCCESS;
7394
7395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7396 }
7397 }
7398 else
7399 {
7400 switch (pVCpu->iem.s.enmEffOpSize)
7401 {
7402 case IEMMODE_16BIT:
7403 IEM_MC_BEGIN(4, 3);
7404 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7405 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7406 IEM_MC_ARG(uint16_t, u16Src, 2);
7407 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7409 IEM_MC_LOCAL(uint16_t, u16Ax);
7410
7411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7412 IEMOP_HLP_DONE_DECODING();
7413 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7414 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7415 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7416 IEM_MC_FETCH_EFLAGS(EFlags);
7417 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7418 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7419 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7420 else
7421 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7422
7423 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7424 IEM_MC_COMMIT_EFLAGS(EFlags);
7425 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7426 IEM_MC_ADVANCE_RIP();
7427 IEM_MC_END();
7428 return VINF_SUCCESS;
7429
7430 case IEMMODE_32BIT:
7431 IEM_MC_BEGIN(4, 3);
7432 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7433 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7434 IEM_MC_ARG(uint32_t, u32Src, 2);
7435 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7437 IEM_MC_LOCAL(uint32_t, u32Eax);
7438
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING();
7441 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7442 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7443 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7444 IEM_MC_FETCH_EFLAGS(EFlags);
7445 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7446 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7447 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7448 else
7449 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7450
7451 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7452 IEM_MC_COMMIT_EFLAGS(EFlags);
7453 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7454 IEM_MC_ADVANCE_RIP();
7455 IEM_MC_END();
7456 return VINF_SUCCESS;
7457
7458 case IEMMODE_64BIT:
7459 IEM_MC_BEGIN(4, 3);
7460 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7461 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7462#ifdef RT_ARCH_X86
7463 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7464#else
7465 IEM_MC_ARG(uint64_t, u64Src, 2);
7466#endif
7467 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7469 IEM_MC_LOCAL(uint64_t, u64Rax);
7470
7471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7472 IEMOP_HLP_DONE_DECODING();
7473 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7474 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7475 IEM_MC_FETCH_EFLAGS(EFlags);
7476 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7477#ifdef RT_ARCH_X86
7478 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7479 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7480 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7481 else
7482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7483#else
7484 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7485 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7486 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7487 else
7488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7489#endif
7490
7491 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7492 IEM_MC_COMMIT_EFLAGS(EFlags);
7493 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7494 IEM_MC_ADVANCE_RIP();
7495 IEM_MC_END();
7496 return VINF_SUCCESS;
7497
7498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7499 }
7500 }
7501}
7502
7503
7504FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7505{
7506 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7507 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7508
7509 switch (pVCpu->iem.s.enmEffOpSize)
7510 {
7511 case IEMMODE_16BIT:
7512 IEM_MC_BEGIN(5, 1);
7513 IEM_MC_ARG(uint16_t, uSel, 0);
7514 IEM_MC_ARG(uint16_t, offSeg, 1);
7515 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7516 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7517 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7518 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7521 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7522 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7523 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7524 IEM_MC_END();
7525 return VINF_SUCCESS;
7526
7527 case IEMMODE_32BIT:
7528 IEM_MC_BEGIN(5, 1);
7529 IEM_MC_ARG(uint16_t, uSel, 0);
7530 IEM_MC_ARG(uint32_t, offSeg, 1);
7531 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7532 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7533 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7534 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7537 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7538 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7539 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7540 IEM_MC_END();
7541 return VINF_SUCCESS;
7542
7543 case IEMMODE_64BIT:
7544 IEM_MC_BEGIN(5, 1);
7545 IEM_MC_ARG(uint16_t, uSel, 0);
7546 IEM_MC_ARG(uint64_t, offSeg, 1);
7547 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7548 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7549 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7550 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7553 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7554 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7555 else
7556 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7557 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7558 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7559 IEM_MC_END();
7560 return VINF_SUCCESS;
7561
7562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7563 }
7564}
7565
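/* Illustrative only: the far pointer layout the three cases above fetch from
   memory, offset first with the 16-bit selector immediately after it (at
   displacement 2, 4 or 8 depending on the operand size). The type name is made
   up; this shows the 32-bit operand size variant. */
#if 0
#pragma pack(1)
typedef struct IEMEXAMPLEFARPTR32
{
    uint32_t off;   /* bytes 0..3: the offset, fetched into offSeg */
    uint16_t sel;   /* bytes 4..5: the selector, fetched into uSel */
} IEMEXAMPLEFARPTR32;
#pragma pack()
#endif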
7566
7567/** Opcode 0x0f 0xb2. */
7568FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7569{
7570 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7571 IEMOP_HLP_MIN_386();
7572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7573 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7574 return IEMOP_RAISE_INVALID_OPCODE();
7575 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7576}
7577
7578
7579/** Opcode 0x0f 0xb3. */
7580FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7581{
7582 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7583 IEMOP_HLP_MIN_386();
7584 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7585}
7586
7587
7588/** Opcode 0x0f 0xb4. */
7589FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7590{
7591 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7592 IEMOP_HLP_MIN_386();
7593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7595 return IEMOP_RAISE_INVALID_OPCODE();
7596 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7597}
7598
7599
7600/** Opcode 0x0f 0xb5. */
7601FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7602{
7603 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7604 IEMOP_HLP_MIN_386();
7605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7607 return IEMOP_RAISE_INVALID_OPCODE();
7608 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7609}
7610
7611
7612/** Opcode 0x0f 0xb6. */
7613FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7614{
7615 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7616 IEMOP_HLP_MIN_386();
7617
7618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7619
7620 /*
7621 * If rm is denoting a register, no more instruction bytes.
7622 */
7623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7624 {
7625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7626 switch (pVCpu->iem.s.enmEffOpSize)
7627 {
7628 case IEMMODE_16BIT:
7629 IEM_MC_BEGIN(0, 1);
7630 IEM_MC_LOCAL(uint16_t, u16Value);
7631 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7632 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7633 IEM_MC_ADVANCE_RIP();
7634 IEM_MC_END();
7635 return VINF_SUCCESS;
7636
7637 case IEMMODE_32BIT:
7638 IEM_MC_BEGIN(0, 1);
7639 IEM_MC_LOCAL(uint32_t, u32Value);
7640 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7641 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7642 IEM_MC_ADVANCE_RIP();
7643 IEM_MC_END();
7644 return VINF_SUCCESS;
7645
7646 case IEMMODE_64BIT:
7647 IEM_MC_BEGIN(0, 1);
7648 IEM_MC_LOCAL(uint64_t, u64Value);
7649 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7650 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7651 IEM_MC_ADVANCE_RIP();
7652 IEM_MC_END();
7653 return VINF_SUCCESS;
7654
7655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7656 }
7657 }
7658 else
7659 {
7660 /*
7661 * We're loading a register from memory.
7662 */
7663 switch (pVCpu->iem.s.enmEffOpSize)
7664 {
7665 case IEMMODE_16BIT:
7666 IEM_MC_BEGIN(0, 2);
7667 IEM_MC_LOCAL(uint16_t, u16Value);
7668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7671 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7672 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7673 IEM_MC_ADVANCE_RIP();
7674 IEM_MC_END();
7675 return VINF_SUCCESS;
7676
7677 case IEMMODE_32BIT:
7678 IEM_MC_BEGIN(0, 2);
7679 IEM_MC_LOCAL(uint32_t, u32Value);
7680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7683 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7684 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7685 IEM_MC_ADVANCE_RIP();
7686 IEM_MC_END();
7687 return VINF_SUCCESS;
7688
7689 case IEMMODE_64BIT:
7690 IEM_MC_BEGIN(0, 2);
7691 IEM_MC_LOCAL(uint64_t, u64Value);
7692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7695 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7696 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7697 IEM_MC_ADVANCE_RIP();
7698 IEM_MC_END();
7699 return VINF_SUCCESS;
7700
7701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7702 }
7703 }
7704}
7705
7706
7707/** Opcode 0x0f 0xb7. */
7708FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7709{
7710 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7711 IEMOP_HLP_MIN_386();
7712
7713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7714
7715 /** @todo Not entirely sure how the operand size prefix is handled here,
7716 * assuming that it will be ignored. Would be nice to have a few
7717 * tests for this. */
7718 /*
7719 * If rm is denoting a register, no more instruction bytes.
7720 */
7721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7722 {
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7725 {
7726 IEM_MC_BEGIN(0, 1);
7727 IEM_MC_LOCAL(uint32_t, u32Value);
7728 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7729 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7730 IEM_MC_ADVANCE_RIP();
7731 IEM_MC_END();
7732 }
7733 else
7734 {
7735 IEM_MC_BEGIN(0, 1);
7736 IEM_MC_LOCAL(uint64_t, u64Value);
7737 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7738 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7739 IEM_MC_ADVANCE_RIP();
7740 IEM_MC_END();
7741 }
7742 }
7743 else
7744 {
7745 /*
7746 * We're loading a register from memory.
7747 */
7748 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7749 {
7750 IEM_MC_BEGIN(0, 2);
7751 IEM_MC_LOCAL(uint32_t, u32Value);
7752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7755 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7756 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7757 IEM_MC_ADVANCE_RIP();
7758 IEM_MC_END();
7759 }
7760 else
7761 {
7762 IEM_MC_BEGIN(0, 2);
7763 IEM_MC_LOCAL(uint64_t, u64Value);
7764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7767 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7768 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7769 IEM_MC_ADVANCE_RIP();
7770 IEM_MC_END();
7771 }
7772 }
7773 return VINF_SUCCESS;
7774}
7775
7776
7777/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7778FNIEMOP_UD_STUB(iemOp_jmpe);
7779/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7780FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7781
7782
7783/**
7784 * @opcode 0xb9
7785 * @opinvalid intel-modrm
7786 * @optest ->
7787 */
7788FNIEMOP_DEF(iemOp_Grp10)
7789{
7790 /*
7791 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7792 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7793 */
7794 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7795 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7796 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7797}
7798
7799
7800/** Opcode 0x0f 0xba. */
7801FNIEMOP_DEF(iemOp_Grp8)
7802{
7803 IEMOP_HLP_MIN_386();
7804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7805 PCIEMOPBINSIZES pImpl;
7806 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7807 {
7808 case 0: case 1: case 2: case 3:
7809 /* Both AMD and Intel want full modr/m decoding and imm8. */
7810 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7811 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7812 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7813 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7814 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7816 }
7817 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7818
7819 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7820 {
7821 /* register destination. */
7822 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7824
7825 switch (pVCpu->iem.s.enmEffOpSize)
7826 {
7827 case IEMMODE_16BIT:
7828 IEM_MC_BEGIN(3, 0);
7829 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7830 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7831 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7832
7833 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7834 IEM_MC_REF_EFLAGS(pEFlags);
7835 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7836
7837 IEM_MC_ADVANCE_RIP();
7838 IEM_MC_END();
7839 return VINF_SUCCESS;
7840
7841 case IEMMODE_32BIT:
7842 IEM_MC_BEGIN(3, 0);
7843 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7844 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7845 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7846
7847 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7848 IEM_MC_REF_EFLAGS(pEFlags);
7849 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7850
7851 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7852 IEM_MC_ADVANCE_RIP();
7853 IEM_MC_END();
7854 return VINF_SUCCESS;
7855
7856 case IEMMODE_64BIT:
7857 IEM_MC_BEGIN(3, 0);
7858 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7859 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7860 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7861
7862 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7863 IEM_MC_REF_EFLAGS(pEFlags);
7864 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7865
7866 IEM_MC_ADVANCE_RIP();
7867 IEM_MC_END();
7868 return VINF_SUCCESS;
7869
7870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7871 }
7872 }
7873 else
7874 {
7875 /* memory destination. */
7876
7877 uint32_t fAccess;
7878 if (pImpl->pfnLockedU16)
7879 fAccess = IEM_ACCESS_DATA_RW;
7880 else /* BT */
7881 fAccess = IEM_ACCESS_DATA_R;
7882
7883 /** @todo test negative bit offsets! */
7884 switch (pVCpu->iem.s.enmEffOpSize)
7885 {
7886 case IEMMODE_16BIT:
7887 IEM_MC_BEGIN(3, 1);
7888 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7889 IEM_MC_ARG(uint16_t, u16Src, 1);
7890 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7892
7893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7894 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7895 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7896 if (pImpl->pfnLockedU16)
7897 IEMOP_HLP_DONE_DECODING();
7898 else
7899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7900 IEM_MC_FETCH_EFLAGS(EFlags);
7901 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7902 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7904 else
7905 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7906 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7907
7908 IEM_MC_COMMIT_EFLAGS(EFlags);
7909 IEM_MC_ADVANCE_RIP();
7910 IEM_MC_END();
7911 return VINF_SUCCESS;
7912
7913 case IEMMODE_32BIT:
7914 IEM_MC_BEGIN(3, 1);
7915 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7916 IEM_MC_ARG(uint32_t, u32Src, 1);
7917 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7919
7920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7921 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7922 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7923 if (pImpl->pfnLockedU16)
7924 IEMOP_HLP_DONE_DECODING();
7925 else
7926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7927 IEM_MC_FETCH_EFLAGS(EFlags);
7928 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7929 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7930 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7931 else
7932 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7933 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7934
7935 IEM_MC_COMMIT_EFLAGS(EFlags);
7936 IEM_MC_ADVANCE_RIP();
7937 IEM_MC_END();
7938 return VINF_SUCCESS;
7939
7940 case IEMMODE_64BIT:
7941 IEM_MC_BEGIN(3, 1);
7942 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7943 IEM_MC_ARG(uint64_t, u64Src, 1);
7944 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7946
7947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7948 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7949 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7950 if (pImpl->pfnLockedU16)
7951 IEMOP_HLP_DONE_DECODING();
7952 else
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_FETCH_EFLAGS(EFlags);
7955 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7956 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7957 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7958 else
7959 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7960 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7961
7962 IEM_MC_COMMIT_EFLAGS(EFlags);
7963 IEM_MC_ADVANCE_RIP();
7964 IEM_MC_END();
7965 return VINF_SUCCESS;
7966
7967 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7968 }
7969 }
7970}
7971
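/* Illustrative only: in the register forms above, the BT/BTS/BTR/BTC immediate
   bit offset is taken modulo the operand width, i.e. masked with width-1; the
   helper name is made up. */
#if 0
static uint8_t iemExampleBitOfsMask(uint8_t u8Bit, unsigned cBits)
{
    return u8Bit & (cBits - 1); /* 0x0f, 0x1f or 0x3f for 16, 32 and 64 bits */
}
#endif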
7972
7973/** Opcode 0x0f 0xbb. */
7974FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7975{
7976 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7977 IEMOP_HLP_MIN_386();
7978 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7979}
7980
7981
7982/** Opcode 0x0f 0xbc. */
7983FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7984{
7985 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7986 IEMOP_HLP_MIN_386();
7987 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7988 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7989}
7990
7991
7992/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7993FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7994
7995
7996/** Opcode 0x0f 0xbd. */
7997FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7998{
7999 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8000 IEMOP_HLP_MIN_386();
8001 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8002 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
8003}
8004
8005
8006/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8007FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
8008
8009
8010/** Opcode 0x0f 0xbe. */
8011FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8012{
8013 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8014 IEMOP_HLP_MIN_386();
8015
8016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8017
8018 /*
8019 * If rm is denoting a register, no more instruction bytes.
8020 */
8021 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8022 {
8023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024 switch (pVCpu->iem.s.enmEffOpSize)
8025 {
8026 case IEMMODE_16BIT:
8027 IEM_MC_BEGIN(0, 1);
8028 IEM_MC_LOCAL(uint16_t, u16Value);
8029 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8030 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8031 IEM_MC_ADVANCE_RIP();
8032 IEM_MC_END();
8033 return VINF_SUCCESS;
8034
8035 case IEMMODE_32BIT:
8036 IEM_MC_BEGIN(0, 1);
8037 IEM_MC_LOCAL(uint32_t, u32Value);
8038 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8039 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8040 IEM_MC_ADVANCE_RIP();
8041 IEM_MC_END();
8042 return VINF_SUCCESS;
8043
8044 case IEMMODE_64BIT:
8045 IEM_MC_BEGIN(0, 1);
8046 IEM_MC_LOCAL(uint64_t, u64Value);
8047 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8048 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8049 IEM_MC_ADVANCE_RIP();
8050 IEM_MC_END();
8051 return VINF_SUCCESS;
8052
8053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8054 }
8055 }
8056 else
8057 {
8058 /*
8059 * We're loading a register from memory.
8060 */
8061 switch (pVCpu->iem.s.enmEffOpSize)
8062 {
8063 case IEMMODE_16BIT:
8064 IEM_MC_BEGIN(0, 2);
8065 IEM_MC_LOCAL(uint16_t, u16Value);
8066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8069 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8070 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8071 IEM_MC_ADVANCE_RIP();
8072 IEM_MC_END();
8073 return VINF_SUCCESS;
8074
8075 case IEMMODE_32BIT:
8076 IEM_MC_BEGIN(0, 2);
8077 IEM_MC_LOCAL(uint32_t, u32Value);
8078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8081 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8082 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8083 IEM_MC_ADVANCE_RIP();
8084 IEM_MC_END();
8085 return VINF_SUCCESS;
8086
8087 case IEMMODE_64BIT:
8088 IEM_MC_BEGIN(0, 2);
8089 IEM_MC_LOCAL(uint64_t, u64Value);
8090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8093 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8094 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8095 IEM_MC_ADVANCE_RIP();
8096 IEM_MC_END();
8097 return VINF_SUCCESS;
8098
8099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8100 }
8101 }
8102}
8103
8104
8105/** Opcode 0x0f 0xbf. */
8106FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8107{
8108 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8109 IEMOP_HLP_MIN_386();
8110
8111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8112
8113 /** @todo Not entirely sure how the operand size prefix is handled here,
8114 * assuming that it will be ignored. Would be nice to have a few
8115 * tests for this. */
8116 /*
8117 * If rm is denoting a register, no more instruction bytes.
8118 */
8119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8120 {
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8123 {
8124 IEM_MC_BEGIN(0, 1);
8125 IEM_MC_LOCAL(uint32_t, u32Value);
8126 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8127 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8128 IEM_MC_ADVANCE_RIP();
8129 IEM_MC_END();
8130 }
8131 else
8132 {
8133 IEM_MC_BEGIN(0, 1);
8134 IEM_MC_LOCAL(uint64_t, u64Value);
8135 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8136 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8137 IEM_MC_ADVANCE_RIP();
8138 IEM_MC_END();
8139 }
8140 }
8141 else
8142 {
8143 /*
8144 * We're loading a register from memory.
8145 */
8146 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8147 {
8148 IEM_MC_BEGIN(0, 2);
8149 IEM_MC_LOCAL(uint32_t, u32Value);
8150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8153 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8154 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8155 IEM_MC_ADVANCE_RIP();
8156 IEM_MC_END();
8157 }
8158 else
8159 {
8160 IEM_MC_BEGIN(0, 2);
8161 IEM_MC_LOCAL(uint64_t, u64Value);
8162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8165 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8166 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8167 IEM_MC_ADVANCE_RIP();
8168 IEM_MC_END();
8169 }
8170 }
8171 return VINF_SUCCESS;
8172}
8173
8174
8175/** Opcode 0x0f 0xc0. */
8176FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8177{
8178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8179 IEMOP_HLP_MIN_486();
8180 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8181
8182 /*
8183 * If rm is denoting a register, no more instruction bytes.
8184 */
8185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8186 {
8187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8188
8189 IEM_MC_BEGIN(3, 0);
8190 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8191 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8192 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8193
8194 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8195 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8196 IEM_MC_REF_EFLAGS(pEFlags);
8197 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8198
8199 IEM_MC_ADVANCE_RIP();
8200 IEM_MC_END();
8201 }
8202 else
8203 {
8204 /*
8205 * We're accessing memory.
8206 */
8207 IEM_MC_BEGIN(3, 3);
8208 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8209 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8210 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8211 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8213
8214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8215 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8216 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8217 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8218 IEM_MC_FETCH_EFLAGS(EFlags);
8219 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8220 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8221 else
8222 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8223
8224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8225 IEM_MC_COMMIT_EFLAGS(EFlags);
8226 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8227 IEM_MC_ADVANCE_RIP();
8228 IEM_MC_END();
8229 return VINF_SUCCESS;
8230 }
8231 return VINF_SUCCESS;
8232}
8233
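/* A simplified sketch of the exchange-and-add step performed by the
   iemAImpl_xadd_u8 workers called above (the arithmetic flag updates are
   omitted); the helper name is made up for illustration. */
#if 0
static void iemExampleXAddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uTmp = *puDst;    /* remember the original destination */
    *puDst = uTmp + *puReg;         /* the destination receives the sum */
    *puReg = uTmp;                  /* the source register receives the old destination */
}
#endif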
8234
8235/** Opcode 0x0f 0xc1. */
8236FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8237{
8238 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8239 IEMOP_HLP_MIN_486();
8240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8241
8242 /*
8243 * If rm is denoting a register, no more instruction bytes.
8244 */
8245 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8246 {
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248
8249 switch (pVCpu->iem.s.enmEffOpSize)
8250 {
8251 case IEMMODE_16BIT:
8252 IEM_MC_BEGIN(3, 0);
8253 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8254 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8255 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8256
8257 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8258 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8259 IEM_MC_REF_EFLAGS(pEFlags);
8260 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8261
8262 IEM_MC_ADVANCE_RIP();
8263 IEM_MC_END();
8264 return VINF_SUCCESS;
8265
8266 case IEMMODE_32BIT:
8267 IEM_MC_BEGIN(3, 0);
8268 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8269 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8271
8272 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8273 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8274 IEM_MC_REF_EFLAGS(pEFlags);
8275 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8276
8277 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8278 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8279 IEM_MC_ADVANCE_RIP();
8280 IEM_MC_END();
8281 return VINF_SUCCESS;
8282
8283 case IEMMODE_64BIT:
8284 IEM_MC_BEGIN(3, 0);
8285 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8286 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8287 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8288
8289 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8290 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8291 IEM_MC_REF_EFLAGS(pEFlags);
8292 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8293
8294 IEM_MC_ADVANCE_RIP();
8295 IEM_MC_END();
8296 return VINF_SUCCESS;
8297
8298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8299 }
8300 }
8301 else
8302 {
8303 /*
8304 * We're accessing memory.
8305 */
8306 switch (pVCpu->iem.s.enmEffOpSize)
8307 {
8308 case IEMMODE_16BIT:
8309 IEM_MC_BEGIN(3, 3);
8310 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8311 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8312 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8313 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8315
8316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8317 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8318 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8319 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8320 IEM_MC_FETCH_EFLAGS(EFlags);
8321 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8322 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8323 else
8324 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8325
8326 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8327 IEM_MC_COMMIT_EFLAGS(EFlags);
8328 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8329 IEM_MC_ADVANCE_RIP();
8330 IEM_MC_END();
8331 return VINF_SUCCESS;
8332
8333 case IEMMODE_32BIT:
8334 IEM_MC_BEGIN(3, 3);
8335 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8336 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8337 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8338 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8340
8341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8342 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8343 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8344 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8345 IEM_MC_FETCH_EFLAGS(EFlags);
8346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8347 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8348 else
8349 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8350
8351 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8352 IEM_MC_COMMIT_EFLAGS(EFlags);
8353 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8354 IEM_MC_ADVANCE_RIP();
8355 IEM_MC_END();
8356 return VINF_SUCCESS;
8357
8358 case IEMMODE_64BIT:
8359 IEM_MC_BEGIN(3, 3);
8360 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8361 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8362 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8363 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8365
8366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8367 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8368 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8369 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8370 IEM_MC_FETCH_EFLAGS(EFlags);
8371 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8372 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8373 else
8374 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8375
8376 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8377 IEM_MC_COMMIT_EFLAGS(EFlags);
8378 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8379 IEM_MC_ADVANCE_RIP();
8380 IEM_MC_END();
8381 return VINF_SUCCESS;
8382
8383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8384 }
8385 }
8386}
8387
8388
8389/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8390FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8391/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8392FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8393/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8394FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8395/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8396FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8397
8398
8399/** Opcode 0x0f 0xc3. */
8400FNIEMOP_DEF(iemOp_movnti_My_Gy)
8401{
8402 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8403
8404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8405
8406 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8407 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8408 {
8409 switch (pVCpu->iem.s.enmEffOpSize)
8410 {
8411 case IEMMODE_32BIT:
8412 IEM_MC_BEGIN(0, 2);
8413 IEM_MC_LOCAL(uint32_t, u32Value);
8414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8415
8416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8418 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8419 return IEMOP_RAISE_INVALID_OPCODE();
8420
8421 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8423 IEM_MC_ADVANCE_RIP();
8424 IEM_MC_END();
8425 break;
8426
8427 case IEMMODE_64BIT:
8428 IEM_MC_BEGIN(0, 2);
8429 IEM_MC_LOCAL(uint64_t, u64Value);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431
8432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8434 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8435 return IEMOP_RAISE_INVALID_OPCODE();
8436
8437 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8438 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8439 IEM_MC_ADVANCE_RIP();
8440 IEM_MC_END();
8441 break;
8442
8443 case IEMMODE_16BIT:
8444 /** @todo check this form. */
8445 return IEMOP_RAISE_INVALID_OPCODE();
8446 }
8447 }
8448 else
8449 return IEMOP_RAISE_INVALID_OPCODE();
8450 return VINF_SUCCESS;
8451}
8452/* Opcode 0x66 0x0f 0xc3 - invalid */
8453/* Opcode 0xf3 0x0f 0xc3 - invalid */
8454/* Opcode 0xf2 0x0f 0xc3 - invalid */
8455
8456/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8457FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8458/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8459FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8460/* Opcode 0xf3 0x0f 0xc4 - invalid */
8461/* Opcode 0xf2 0x0f 0xc4 - invalid */
8462
8463/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8464FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8465/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8466FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8467/* Opcode 0xf3 0x0f 0xc5 - invalid */
8468/* Opcode 0xf2 0x0f 0xc5 - invalid */
8469
8470/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8471FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8472/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8473FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8474/* Opcode 0xf3 0x0f 0xc6 - invalid */
8475/* Opcode 0xf2 0x0f 0xc6 - invalid */
8476
8477
8478/** Opcode 0x0f 0xc7 !11/1. */
8479FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8480{
8481 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8482
8483 IEM_MC_BEGIN(4, 3);
8484 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8485 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8486 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8487 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8488 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8489 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8491
8492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8493 IEMOP_HLP_DONE_DECODING();
8494 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8495
8496 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8497 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8498 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8499
8500 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8501 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8502 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8503
8504 IEM_MC_FETCH_EFLAGS(EFlags);
8505 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8507 else
8508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8509
8510 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8511 IEM_MC_COMMIT_EFLAGS(EFlags);
8512 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8513 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8514 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8515 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8516 IEM_MC_ENDIF();
8517 IEM_MC_ADVANCE_RIP();
8518
8519 IEM_MC_END();
8520 return VINF_SUCCESS;
8521}
8522
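/* A simplified sketch of the CMPXCHG8B comparison done by the workers called
   above: EDX:EAX against the 64-bit memory operand, storing ECX:EBX on a match
   (flags other than ZF omitted); the helper name is made up for illustration. */
#if 0
static void iemExampleCmpXchg8b(uint64_t *puMem, RTUINT64U *puEaxEdx, RTUINT64U const *puEbxEcx, uint32_t *pfEFlags)
{
    if (*puMem == puEaxEdx->u)
    {
        *pfEFlags |= X86_EFL_ZF;
        *puMem     = puEbxEcx->u;   /* match: store ECX:EBX to memory */
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        puEaxEdx->u = *puMem;       /* mismatch: load EDX:EAX from memory */
    }
}
#endif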
8523
8524/** Opcode REX.W 0x0f 0xc7 !11/1. */
8525FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8526{
8527 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8528 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8529 {
8530#if 0
8531 RT_NOREF(bRm);
8532 IEMOP_BITCH_ABOUT_STUB();
8533 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8534#else
8535 IEM_MC_BEGIN(4, 3);
8536 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8537 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8538 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8539 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8540 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8541 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8543
8544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8545 IEMOP_HLP_DONE_DECODING();
8546 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8547 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8548
8549 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8550 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8551 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8552
8553 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8554 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8555 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8556
8557 IEM_MC_FETCH_EFLAGS(EFlags);
8558# ifdef RT_ARCH_AMD64
8559 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8560 {
8561 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8562 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8563 else
8564 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8565 }
8566 else
8567# endif
8568 {
8569 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
8570 accesses and is not at all atomic, which works fine in a uni-CPU guest
8571 configuration (ignoring DMA). If guest SMP is active we have no choice
8572 but to use a rendezvous callback here. Sigh. */
8573 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8574 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8575 else
8576 {
8577 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8578 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8579 }
8580 }
8581
8582 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8583 IEM_MC_COMMIT_EFLAGS(EFlags);
8584 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8585 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8586 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8587 IEM_MC_ENDIF();
8588 IEM_MC_ADVANCE_RIP();
8589
8590 IEM_MC_END();
8591 return VINF_SUCCESS;
8592#endif
8593 }
8594 Log(("cmpxchg16b -> #UD\n"));
8595 return IEMOP_RAISE_INVALID_OPCODE();
8596}
8597
8598FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8599{
8600 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8601 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8602 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8603}
8604
8605/** Opcode 0x0f 0xc7 11/6. */
8606FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8607
8608/** Opcode 0x0f 0xc7 !11/6. */
8609#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8610FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8611{
8612 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8613 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
8614 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
8615 IEM_MC_BEGIN(2, 0);
8616 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8617 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8619 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8620 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8621 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8622 IEM_MC_END();
8623 return VINF_SUCCESS;
8624}
8625#else
8626FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8627#endif
8628
8629/** Opcode 0x66 0x0f 0xc7 !11/6. */
8630#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8631FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8632{
8633 IEMOP_MNEMONIC(vmclear, "vmclear");
8634 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
8635 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
8636 IEM_MC_BEGIN(2, 0);
8637 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8638 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8640 IEMOP_HLP_DONE_DECODING();
8641 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8642 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8643 IEM_MC_END();
8644 return VINF_SUCCESS;
8645}
8646#else
8647FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8648#endif
8649
8650/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8651#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8652FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8653{
8654 IEMOP_MNEMONIC(vmxon, "vmxon");
8655 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
8656 IEM_MC_BEGIN(2, 0);
8657 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8658 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8660 IEMOP_HLP_DONE_DECODING();
8661 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8662 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8663 IEM_MC_END();
8664 return VINF_SUCCESS;
8665}
8666#else
8667FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8668#endif
8669
8670/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8671#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8672FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8673{
8674 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8675 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
8676 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
8677 IEM_MC_BEGIN(2, 0);
8678 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8679 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8681 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8682 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8683 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8684 IEM_MC_END();
8685 return VINF_SUCCESS;
8686}
8687#else
8688FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8689#endif
8690
8691/** Opcode 0x0f 0xc7 11/7. */
8692FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8693
8694
8695/**
8696 * Group 9 jump table for register variant.
8697 */
8698IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8699{ /* pfx: none, 066h, 0f3h, 0f2h */
8700 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8701 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8702 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8703 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8704 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8705 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8706 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8707 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8708};
8709AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8710
8711
8712/**
8713 * Group 9 jump table for memory variant.
8714 */
8715IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8716{ /* pfx: none, 066h, 0f3h, 0f2h */
8717 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8718 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8719 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8720 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8721 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8722 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8723 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8724 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8725};
8726AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8727
8728
8729/** Opcode 0x0f 0xc7. */
8730FNIEMOP_DEF(iemOp_Grp9)
8731{
8732 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8734 /* register, register */
8735 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8736 + pVCpu->iem.s.idxPrefix], bRm);
8737 /* memory, register */
8738 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8739 + pVCpu->iem.s.idxPrefix], bRm);
8740}
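
/*
 * Worked example (illustrative, not part of the decoder): with no
 * mandatory prefix (idxPrefix=0) and bRm=0x0e we have mod=0, reg=1 and
 * rm=6, so the memory form dispatches to g_apfnGroup9MemReg[1*4 + 0],
 * i.e. iemOp_Grp9_cmpxchg8bOr16b above.
 */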
8741
8742
8743/**
8744 * Common 'bswap register' helper.
8745 */
8746FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8747{
8748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8749 switch (pVCpu->iem.s.enmEffOpSize)
8750 {
8751 case IEMMODE_16BIT:
8752 IEM_MC_BEGIN(1, 0);
8753 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8754 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8755 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8756 IEM_MC_ADVANCE_RIP();
8757 IEM_MC_END();
8758 return VINF_SUCCESS;
8759
8760 case IEMMODE_32BIT:
8761 IEM_MC_BEGIN(1, 0);
8762 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8763 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8764 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8765 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8766 IEM_MC_ADVANCE_RIP();
8767 IEM_MC_END();
8768 return VINF_SUCCESS;
8769
8770 case IEMMODE_64BIT:
8771 IEM_MC_BEGIN(1, 0);
8772 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8773 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8774 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8775 IEM_MC_ADVANCE_RIP();
8776 IEM_MC_END();
8777 return VINF_SUCCESS;
8778
8779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8780 }
8781}
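
/*
 * Reference semantics in plain C (illustrative sketch only; the real
 * work is done by the iemAImpl_bswap_* helpers bound above).  Note that
 * BSWAP with a 16-bit operand is undefined in the Intel/AMD manuals,
 * which is why the 16-bit case above operates on a 32-bit register
 * reference via iemAImpl_bswap_u16 instead.
 */
DECLINLINE(uint32_t) iemBswapSketchU32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}

DECLINLINE(uint64_t) iemBswapSketchU64(uint64_t u64)
{
    return ((uint64_t)iemBswapSketchU32((uint32_t)u64) << 32)
         |  iemBswapSketchU32((uint32_t)(u64 >> 32));
}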
8782
8783
8784/** Opcode 0x0f 0xc8. */
8785FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8786{
8787 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8788 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8789 prefix; it appears REX.B is actually the correct prefix. For a parallel
8790 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8791 IEMOP_HLP_MIN_486();
8792 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8793}
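
/*
 * Encoding example: in 64-bit mode 48 0f c8 is 'bswap rax', while
 * 49 0f c8 (REX.B set) is 'bswap r8'.  uRexB holds that bit pre-shifted
 * (0 or 8), so the OR above yields X86_GREG_xAX or the r8 index.
 */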
8794
8795
8796/** Opcode 0x0f 0xc9. */
8797FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8798{
8799 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8800 IEMOP_HLP_MIN_486();
8801 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8802}
8803
8804
8805/** Opcode 0x0f 0xca. */
8806FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8807{
8808 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8809 IEMOP_HLP_MIN_486();
8810 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8811}
8812
8813
8814/** Opcode 0x0f 0xcb. */
8815FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8816{
8817 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8818 IEMOP_HLP_MIN_486();
8819 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8820}
8821
8822
8823/** Opcode 0x0f 0xcc. */
8824FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8825{
8826 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8827 IEMOP_HLP_MIN_486();
8828 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8829}
8830
8831
8832/** Opcode 0x0f 0xcd. */
8833FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8834{
8835 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8836 IEMOP_HLP_MIN_486();
8837 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8838}
8839
8840
8841/** Opcode 0x0f 0xce. */
8842FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8843{
8844 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8845 IEMOP_HLP_MIN_486();
8846 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8847}
8848
8849
8850/** Opcode 0x0f 0xcf. */
8851FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8852{
8853 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8854 IEMOP_HLP_MIN_486();
8855 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8856}
8857
8858
8859/* Opcode 0x0f 0xd0 - invalid */
8860/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8861FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8862/* Opcode 0xf3 0x0f 0xd0 - invalid */
8863/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8864FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8865
8866/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8867FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8868/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8869FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8870/* Opcode 0xf3 0x0f 0xd1 - invalid */
8871/* Opcode 0xf2 0x0f 0xd1 - invalid */
8872
8873/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8874FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8875/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8876FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8877/* Opcode 0xf3 0x0f 0xd2 - invalid */
8878/* Opcode 0xf2 0x0f 0xd2 - invalid */
8879
8880/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8881FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8882/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8883FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8884/* Opcode 0xf3 0x0f 0xd3 - invalid */
8885/* Opcode 0xf2 0x0f 0xd3 - invalid */
8886
8887/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8888FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8889/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8890FNIEMOP_STUB(iemOp_paddq_Vx_W);
8891/* Opcode 0xf3 0x0f 0xd4 - invalid */
8892/* Opcode 0xf2 0x0f 0xd4 - invalid */
8893
8894/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8895FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8896/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8897FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8898/* Opcode 0xf3 0x0f 0xd5 - invalid */
8899/* Opcode 0xf2 0x0f 0xd5 - invalid */
8900
8901/* Opcode 0x0f 0xd6 - invalid */
8902
8903/**
8904 * @opcode 0xd6
8905 * @oppfx 0x66
8906 * @opcpuid sse2
8907 * @opgroup og_sse2_pcksclr_datamove
8908 * @opxcpttype none
8909 * @optest op1=-1 op2=2 -> op1=2
8910 * @optest op1=0 op2=-42 -> op1=-42
8911 */
8912FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8913{
8914 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8917 {
8918 /*
8919 * Register, register.
8920 */
8921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8922 IEM_MC_BEGIN(0, 2);
8923 IEM_MC_LOCAL(uint64_t, uSrc);
8924
8925 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8926 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8927
8928 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8929 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8930
8931 IEM_MC_ADVANCE_RIP();
8932 IEM_MC_END();
8933 }
8934 else
8935 {
8936 /*
8937 * Memory, register.
8938 */
8939 IEM_MC_BEGIN(0, 2);
8940 IEM_MC_LOCAL(uint64_t, uSrc);
8941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8942
8943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8947
8948 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8949 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8950
8951 IEM_MC_ADVANCE_RIP();
8952 IEM_MC_END();
8953 }
8954 return VINF_SUCCESS;
8955}
8956
8957
8958/**
8959 * @opcode 0xd6
8960 * @opcodesub 11 mr/reg
8961 * @oppfx f3
8962 * @opcpuid sse2
8963 * @opgroup og_sse2_simdint_datamove
8964 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8965 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8966 */
8967FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8968{
8969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8971 {
8972 /*
8973 * Register, register.
8974 */
8975 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8977 IEM_MC_BEGIN(0, 1);
8978 IEM_MC_LOCAL(uint64_t, uSrc);
8979
8980 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8981 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8982
8983 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8984 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8985 IEM_MC_FPU_TO_MMX_MODE();
8986
8987 IEM_MC_ADVANCE_RIP();
8988 IEM_MC_END();
8989 return VINF_SUCCESS;
8990 }
8991
8992 /**
8993 * @opdone
8994 * @opmnemonic udf30fd6mem
8995 * @opcode 0xd6
8996 * @opcodesub !11 mr/reg
8997 * @oppfx f3
8998 * @opunused intel-modrm
8999 * @opcpuid sse
9000 * @optest ->
9001 */
9002 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9003}
9004
9005
9006/**
9007 * @opcode 0xd6
9008 * @opcodesub 11 mr/reg
9009 * @oppfx f2
9010 * @opcpuid sse2
9011 * @opgroup og_sse2_simdint_datamove
9012 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9013 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9014 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9015 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9016 * @optest op1=-42 op2=0xfedcba9876543210
9017 * -> op1=0xfedcba9876543210 ftw=0xff
9018 */
9019FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9020{
9021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9023 {
9024 /*
9025 * Register, register.
9026 */
9027 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9029 IEM_MC_BEGIN(0, 1);
9030 IEM_MC_LOCAL(uint64_t, uSrc);
9031
9032 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9034
9035 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9036 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9037 IEM_MC_FPU_TO_MMX_MODE();
9038
9039 IEM_MC_ADVANCE_RIP();
9040 IEM_MC_END();
9041 return VINF_SUCCESS;
9042 }
9043
9044 /**
9045 * @opdone
9046 * @opmnemonic udf20fd6mem
9047 * @opcode 0xd6
9048 * @opcodesub !11 mr/reg
9049 * @oppfx f2
9050 * @opunused intel-modrm
9051 * @opcpuid sse
9052 * @optest ->
9053 */
9054 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9055}
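
/*
 * Both movq2dq and movdq2q above switch the FPU to MMX mode via
 * IEM_MC_FPU_TO_MMX_MODE(), which conceptually amounts to (sketch only,
 * not the actual MC implementation):
 *
 *     pFpuCtx->FTW  = 0xff;              // all eight registers tagged valid
 *     pFpuCtx->FSW &= ~X86_FSW_TOP_MASK; // TOP = 0
 *
 * hence the ftw=0xff expectations in the @optest lines above.
 */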
9056
9057/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9058FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9059{
9060 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9061 /** @todo testcase: Check that the instruction implicitly clears the high
9062 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
9063 * and opcode modifications are made to work with the whole width (not
9064 * just 128). */
9065 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
9066 /* Docs say register only. */
9067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9069 {
9070 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9071 IEM_MC_BEGIN(2, 0);
9072 IEM_MC_ARG(uint64_t *, pDst, 0);
9073 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9074 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9075 IEM_MC_PREPARE_FPU_USAGE();
9076 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9077 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9078 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9079 IEM_MC_ADVANCE_RIP();
9080 IEM_MC_END();
9081 return VINF_SUCCESS;
9082 }
9083 return IEMOP_RAISE_INVALID_OPCODE();
9084}
9085
9086/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9087FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9088{
9089 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9090 /** @todo testcase: Check that the instruction implicitly clears the high
9091 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
9092 * and opcode modifications are made to work with the whole width (not
9093 * just 128). */
9094 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
9095 /* Docs say register only. */
9096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9098 {
9099 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9100 IEM_MC_BEGIN(2, 0);
9101 IEM_MC_ARG(uint64_t *, pDst, 0);
9102 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9103 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9104 IEM_MC_PREPARE_SSE_USAGE();
9105 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9106 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9107 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9108 IEM_MC_ADVANCE_RIP();
9109 IEM_MC_END();
9110 return VINF_SUCCESS;
9111 }
9112 return IEMOP_RAISE_INVALID_OPCODE();
9113}
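
/*
 * Reference sketch of the pmovmskb operation (plain C, illustrative;
 * the iemAImpl_pmovmskb_* helpers bound above are assembly): gather the
 * most significant bit of each source byte into the low bits of the
 * destination, zeroing the rest.  The 128-bit variant does the same for
 * all 16 bytes, yielding a 16-bit mask.
 */
DECLINLINE(uint64_t) iemPMovMskBSketchU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask; /* bits 8 thru 63 end up zero */
}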
9114
9115/* Opcode 0xf3 0x0f 0xd7 - invalid */
9116/* Opcode 0xf2 0x0f 0xd7 - invalid */
9117
9118
9119/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9120FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9121/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9122FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9123/* Opcode 0xf3 0x0f 0xd8 - invalid */
9124/* Opcode 0xf2 0x0f 0xd8 - invalid */
9125
9126/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9127FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9128/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9129FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9130/* Opcode 0xf3 0x0f 0xd9 - invalid */
9131/* Opcode 0xf2 0x0f 0xd9 - invalid */
9132
9133/** Opcode 0x0f 0xda - pminub Pq, Qq */
9134FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9135/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9136FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9137/* Opcode 0xf3 0x0f 0xda - invalid */
9138/* Opcode 0xf2 0x0f 0xda - invalid */
9139
9140/** Opcode 0x0f 0xdb - pand Pq, Qq */
9141FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9142/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9143FNIEMOP_STUB(iemOp_pand_Vx_W);
9144/* Opcode 0xf3 0x0f 0xdb - invalid */
9145/* Opcode 0xf2 0x0f 0xdb - invalid */
9146
9147/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9148FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9149/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9150FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9151/* Opcode 0xf3 0x0f 0xdc - invalid */
9152/* Opcode 0xf2 0x0f 0xdc - invalid */
9153
9154/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9155FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9156/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9157FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9158/* Opcode 0xf3 0x0f 0xdd - invalid */
9159/* Opcode 0xf2 0x0f 0xdd - invalid */
9160
9161/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9162FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9163/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9164FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9165/* Opcode 0xf3 0x0f 0xde - invalid */
9166/* Opcode 0xf2 0x0f 0xde - invalid */
9167
9168/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9169FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9170/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9171FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9172/* Opcode 0xf3 0x0f 0xdf - invalid */
9173/* Opcode 0xf2 0x0f 0xdf - invalid */
9174
9175/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9176FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9177/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9178FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9179/* Opcode 0xf3 0x0f 0xe0 - invalid */
9180/* Opcode 0xf2 0x0f 0xe0 - invalid */
9181
9182/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9183FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9184/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9185FNIEMOP_STUB(iemOp_psraw_Vx_W);
9186/* Opcode 0xf3 0x0f 0xe1 - invalid */
9187/* Opcode 0xf2 0x0f 0xe1 - invalid */
9188
9189/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9190FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9191/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9192FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9193/* Opcode 0xf3 0x0f 0xe2 - invalid */
9194/* Opcode 0xf2 0x0f 0xe2 - invalid */
9195
9196/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9197FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9198/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9199FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9200/* Opcode 0xf3 0x0f 0xe3 - invalid */
9201/* Opcode 0xf2 0x0f 0xe3 - invalid */
9202
9203/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9204FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9205/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9206FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9207/* Opcode 0xf3 0x0f 0xe4 - invalid */
9208/* Opcode 0xf2 0x0f 0xe4 - invalid */
9209
9210/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9211FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9212/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9213FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9214/* Opcode 0xf3 0x0f 0xe5 - invalid */
9215/* Opcode 0xf2 0x0f 0xe5 - invalid */
9216
9217/* Opcode 0x0f 0xe6 - invalid */
9218/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9219FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9220/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9221FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9222/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9223FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9224
9225
9226/**
9227 * @opcode 0xe7
9228 * @opcodesub !11 mr/reg
9229 * @oppfx none
9230 * @opcpuid sse
9231 * @opgroup og_sse1_cachect
9232 * @opxcpttype none
9233 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9234 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9235 */
9236FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9237{
9238 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9240 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9241 {
9242 /* Register, memory. */
9243 IEM_MC_BEGIN(0, 2);
9244 IEM_MC_LOCAL(uint64_t, uSrc);
9245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9246
9247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9249 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9250 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9251
9252 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9253 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9254 IEM_MC_FPU_TO_MMX_MODE();
9255
9256 IEM_MC_ADVANCE_RIP();
9257 IEM_MC_END();
9258 return VINF_SUCCESS;
9259 }
9260 /**
9261 * @opdone
9262 * @opmnemonic ud0fe7reg
9263 * @opcode 0xe7
9264 * @opcodesub 11 mr/reg
9265 * @oppfx none
9266 * @opunused immediate
9267 * @opcpuid sse
9268 * @optest ->
9269 */
9270 return IEMOP_RAISE_INVALID_OPCODE();
9271}
9272
9273/**
9274 * @opcode 0xe7
9275 * @opcodesub !11 mr/reg
9276 * @oppfx 0x66
9277 * @opcpuid sse2
9278 * @opgroup og_sse2_cachect
9279 * @opxcpttype 1
9280 * @optest op1=-1 op2=2 -> op1=2
9281 * @optest op1=0 op2=-42 -> op1=-42
9282 */
9283FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9284{
9285 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9287 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9288 {
9289 /* Register, memory. */
9290 IEM_MC_BEGIN(0, 2);
9291 IEM_MC_LOCAL(RTUINT128U, uSrc);
9292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9293
9294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9296 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9298
9299 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9300 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9301
9302 IEM_MC_ADVANCE_RIP();
9303 IEM_MC_END();
9304 return VINF_SUCCESS;
9305 }
9306
9307 /**
9308 * @opdone
9309 * @opmnemonic ud660fe7reg
9310 * @opcode 0xe7
9311 * @opcodesub 11 mr/reg
9312 * @oppfx 0x66
9313 * @opunused immediate
9314 * @opcpuid sse
9315 * @optest ->
9316 */
9317 return IEMOP_RAISE_INVALID_OPCODE();
9318}
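
/*
 * Unlike movntq above, movntdq enforces the usual 16-byte SSE alignment
 * rule via IEM_MC_STORE_MEM_U128_ALIGN_SSE; conceptually (sketch only):
 *
 *     if (GCPtrEffSrc & 15)
 *         raise #GP(0);    // misaligned 128-bit access
 *     // else: 16-byte store with a non-temporal (write-combining) hint
 */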
9319
9320/* Opcode 0xf3 0x0f 0xe7 - invalid */
9321/* Opcode 0xf2 0x0f 0xe7 - invalid */
9322
9323
9324/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9325FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9326/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9327FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9328/* Opcode 0xf3 0x0f 0xe8 - invalid */
9329/* Opcode 0xf2 0x0f 0xe8 - invalid */
9330
9331/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9332FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9333/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9334FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9335/* Opcode 0xf3 0x0f 0xe9 - invalid */
9336/* Opcode 0xf2 0x0f 0xe9 - invalid */
9337
9338/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9339FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9340/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9341FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9342/* Opcode 0xf3 0x0f 0xea - invalid */
9343/* Opcode 0xf2 0x0f 0xea - invalid */
9344
9345/** Opcode 0x0f 0xeb - por Pq, Qq */
9346FNIEMOP_STUB(iemOp_por_Pq_Qq);
9347/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9348FNIEMOP_STUB(iemOp_por_Vx_W);
9349/* Opcode 0xf3 0x0f 0xeb - invalid */
9350/* Opcode 0xf2 0x0f 0xeb - invalid */
9351
9352/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9353FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9354/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9355FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9356/* Opcode 0xf3 0x0f 0xec - invalid */
9357/* Opcode 0xf2 0x0f 0xec - invalid */
9358
9359/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9360FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9361/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9362FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9363/* Opcode 0xf3 0x0f 0xed - invalid */
9364/* Opcode 0xf2 0x0f 0xed - invalid */
9365
9366/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9367FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9368/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9369FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9370/* Opcode 0xf3 0x0f 0xee - invalid */
9371/* Opcode 0xf2 0x0f 0xee - invalid */
9372
9373
9374/** Opcode 0x0f 0xef - pxor Pq, Qq */
9375FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9376{
9377 IEMOP_MNEMONIC(pxor, "pxor");
9378 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9379}
9380
9381/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9382FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9383{
9384 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9385 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9386}
9387
9388/* Opcode 0xf3 0x0f 0xef - invalid */
9389/* Opcode 0xf2 0x0f 0xef - invalid */
9390
9391/* Opcode 0x0f 0xf0 - invalid */
9392/* Opcode 0x66 0x0f 0xf0 - invalid */
9393/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9394FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9395
9396/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9397FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9398/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9399FNIEMOP_STUB(iemOp_psllw_Vx_W);
9400/* Opcode 0xf2 0x0f 0xf1 - invalid */
9401
9402/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9403FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9404/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9405FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9406/* Opcode 0xf2 0x0f 0xf2 - invalid */
9407
9408/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9409FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9410/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9411FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9412/* Opcode 0xf2 0x0f 0xf3 - invalid */
9413
9414/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9415FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9416/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9417FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9418/* Opcode 0xf2 0x0f 0xf4 - invalid */
9419
9420/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9421FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9422/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9423FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9424/* Opcode 0xf2 0x0f 0xf5 - invalid */
9425
9426/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9427FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9428/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9429FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9430/* Opcode 0xf2 0x0f 0xf6 - invalid */
9431
9432/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9433FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9434/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9435FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9436/* Opcode 0xf2 0x0f 0xf7 - invalid */
9437
9438/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9439FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9440/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9441FNIEMOP_STUB(iemOp_psubb_Vx_W);
9442/* Opcode 0xf2 0x0f 0xf8 - invalid */
9443
9444/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9445FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9446/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9447FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9448/* Opcode 0xf2 0x0f 0xf9 - invalid */
9449
9450/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9451FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9452/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9453FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9454/* Opcode 0xf2 0x0f 0xfa - invalid */
9455
9456/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9457FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9458/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9459FNIEMOP_STUB(iemOp_psubq_Vx_W);
9460/* Opcode 0xf2 0x0f 0xfb - invalid */
9461
9462/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9463FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9464/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9465FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9466/* Opcode 0xf2 0x0f 0xfc - invalid */
9467
9468/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9469FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9470/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9471FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9472/* Opcode 0xf2 0x0f 0xfd - invalid */
9473
9474/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9475FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9476/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9477FNIEMOP_STUB(iemOp_paddd_Vx_W);
9478/* Opcode 0xf2 0x0f 0xfe - invalid */
9479
9480
9481/** Opcode **** 0x0f 0xff - UD0 */
9482FNIEMOP_DEF(iemOp_ud0)
9483{
9484 IEMOP_MNEMONIC(ud0, "ud0");
9485 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9486 {
9487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9488#ifndef TST_IEM_CHECK_MC
9489 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9490 {
9491 RTGCPTR GCPtrEff;
9492 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9493 if (rcStrict != VINF_SUCCESS)
9494 return rcStrict;
9495 }
9496#endif
9497 IEMOP_HLP_DONE_DECODING();
9498 }
9499 return IEMOP_RAISE_INVALID_OPCODE();
9500}
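
/*
 * Length example for the Intel branch above: Intel CPUs decode a ModR/M
 * byte (plus any SIB and displacement bytes) for UD0, so the sequence
 * 0f ff 84 24 44 33 22 11 is a single 8-byte instruction before #UD is
 * raised, whereas on AMD CPUs 0f ff is just the two opcode bytes.
 */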
9501
9502
9503
9504/**
9505 * Two byte opcode map, first byte 0x0f.
9506 *
9507 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9508 * check if it needs updating as well when making changes.
9509 */
9510IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9511{
9512 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9513 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9514 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9515 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9516 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9517 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9518 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9519 /* 0x06 */ IEMOP_X4(iemOp_clts),
9520 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9521 /* 0x08 */ IEMOP_X4(iemOp_invd),
9522 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9523 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9524 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9525 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9526 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9527 /* 0x0e */ IEMOP_X4(iemOp_femms),
9528 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9529
9530 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9531 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9532 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9533 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9534 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9535 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9536 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9537 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9538 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9539 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9540 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9541 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9542 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9543 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9544 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9545 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9546
9547 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9548 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9549 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9550 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9551 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9552 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9553 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9554 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9555 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9556 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9557 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9558 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9559 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9560 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9561 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9562 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9563
9564 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9565 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9566 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9567 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9568 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9569 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9570 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9571 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9572 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9573 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9574 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9575 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9576 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9577 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9578 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9579 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9580
9581 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9582 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9583 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9584 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9585 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9586 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9587 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9588 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9589 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9590 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9591 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9592 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9593 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9594 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9595 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9596 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9597
9598 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9599 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9600 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9601 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9602 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9603 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9604 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9605 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9606 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9607 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9608 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9609 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9610 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9611 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9612 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9613 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9614
9615 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9619 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9620 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9621 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9622 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9623 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9624 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9625 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9626 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9627 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9628 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9629 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9630 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9631
9632 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9633 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9634 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9635 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9636 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9637 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9638 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9639 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9640
9641 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9642 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9643 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9644 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9645 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9646 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9647 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9648 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9649
9650 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9651 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9652 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9653 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9654 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9655 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9656 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9657 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9658 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9659 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9660 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9661 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9662 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9663 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9664 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9665 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9666
9667 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9668 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9669 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9670 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9671 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9672 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9673 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9674 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9675 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9676 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9677 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9678 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9679 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9680 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9681 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9682 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9683
9684 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9685 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9686 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9687 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9688 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9689 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9690 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9691 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9692 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9693 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9694 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9695 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9696 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9697 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9698 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9699 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9700
9701 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9702 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9703 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9704 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9705 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9706 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9707 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9708 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9709 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9710 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9711 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9712 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9713 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9714 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9715 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9716 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9717
9718 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9719 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9720 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9721 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9722 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9723 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9724 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9725 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9726 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9727 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9728 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9729 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9730 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9731 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9732 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9733 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9734
9735 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9736 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9737 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9738 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9739 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9740 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9741 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9742 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9743 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9744 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9745 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9746 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9747 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9748 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9749 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9750 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9751
9752 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9753 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9754 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9755 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9756 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9757 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9758 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9759 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9760 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9761 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9762 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9763 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9764 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9765 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9766 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9767 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9768
9769 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9770 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9771 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9772 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9773 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9774 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9775 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9776 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9777 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9778 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9779 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9780 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9781 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9782 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9783 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9784 /* 0xff */ IEMOP_X4(iemOp_ud0),
9785};
9786AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
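
/*
 * Dispatch sketch (illustrative; the actual lookup lives with the 0x0f
 * escape byte handling in the one-byte opcode map): four table columns
 * per opcode, selected by the mandatory prefix state, e.g. for
 * 66 0f ef (pxor Vx,Wx):
 *
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   // b = 0xef
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */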
9787
9788/** @} */
9789