VirtualBox: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @96014

Last change on this file since 96014 was 96010, checked in by vboxsync, 3 years ago:
VMM/IEM: Implement [v]pmaxs{b,w,d} instructions, bugref:9898

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96010 2022-08-03 20:15:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
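

/* Editorial sketch: a concrete opcode handler binds an assembly-level worker
   to the common worker above.  Guarded out; the handler and worker names here
   follow the naming pattern used elsewhere in this file but are illustrative
   at this point, not part of the original code: */
#if 0
/** Example: opcode 0x0f 0xef - pxor Pq, Qq (MMX). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq_Example)
{
    IEMOP_MNEMONIC(pxor_example, "pxor Pq,Qq");
    /* Worker name assumed compatible with PFNIEMAIMPLMEDIAF2U64. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}
#endif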


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
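

/* Editorial sketch: handlers for 128-bit SSE2 instructions wire their worker
   into iemOpCommonSse2_FullFull_To_Full the same way.  Guarded out; mnemonic
   macro use and worker name are illustrative (the real handlers appear
   further down in this file): */
#if 0
/** Example: opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx (SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx_Example)
{
    IEMOP_MNEMONIC(pcmpeqb_example, "pcmpeqb Vx,Wx");
    /* Worker name assumed compatible with PFNIEMAIMPLMEDIAF2U128. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
#endif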


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
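

/* Editorial sketch: the "Opt" variant above is for workers whose C fallback
   needs no FXSAVE state (MXCSR and friends), only the two operands.  A handler
   selects it simply by passing a PFNIEMAIMPLMEDIAOPTF2U128 worker; the names
   below are hypothetical, for illustration only: */
#if 0
FNIEMOP_DEF(iemOp_pfoo_Vx_Wx_Example)
{
    IEMOP_MNEMONIC(pfoo_example, "pfoo Vx,Wx");
    /* iemAImpl_pfoo_u128 is hypothetical; any operand-only worker fits here. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pfoo_u128);
}
#endif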


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
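
/* Editorial worked example: for the byte sequence 0x0f 0x00 0xd8, the ModRM
   byte 0xd8 decodes as mod=11b, reg=011b, rm=000b.  IEM_GET_MODRM_REG_8(bRm)
   yields 3, so g_apfnGroup6 dispatches to iemOp_Grp6_ltr, which then uses the
   rm field to pick the source register. */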


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HM, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HM, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
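
/* Editorial worked example: 0x0f 0x01 0xd0 decodes as mod=11b, reg=010b,
   rm=000b; the register path is taken and reg=2/rm=0 selects
   iemOp_Grp7_xgetbv.  With a memory operand, e.g. 0x0f 0x01 0x10 (mod=00b,
   reg=010b), the g_apfnGroup7Mem table dispatches to iemOp_Grp7_lgdt
   instead. */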

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
1748 */
1749 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1750
1751 IEM_MC_BEGIN(0, 2);
1752 IEM_MC_LOCAL(uint64_t, uSrc);
1753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1754
1755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1757 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1759
1760 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1761 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1762
1763 IEM_MC_ADVANCE_RIP();
1764 IEM_MC_END();
1765 }
1766 return VINF_SUCCESS;
1767}
1768
1769
1770/**
1771 * @opcode 0x12
1772 * @opcodesub !11 mr/reg
1773 * @oppfx 0x66
1774 * @opcpuid sse2
1775 * @opgroup og_sse2_pcksclr_datamove
1776 * @opxcpttype 5
1777 * @optest op1=1 op2=2 -> op1=2
1778 * @optest op1=0 op2=-42 -> op1=-42
1779 */
1780FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1781{
1782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1783 if (IEM_IS_MODRM_MEM_MODE(bRm))
1784 {
1785 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1786
1787 IEM_MC_BEGIN(0, 2);
1788 IEM_MC_LOCAL(uint64_t, uSrc);
1789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1790
1791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1793 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1794 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1795
1796 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1797 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1798
1799 IEM_MC_ADVANCE_RIP();
1800 IEM_MC_END();
1801 return VINF_SUCCESS;
1802 }
1803
1804 /**
1805 * @opdone
1806 * @opmnemonic ud660f12m3
1807 * @opcode 0x12
1808 * @opcodesub 11 mr/reg
1809 * @oppfx 0x66
1810 * @opunused immediate
1811 * @opcpuid sse
1812 * @optest ->
1813 */
1814 return IEMOP_RAISE_INVALID_OPCODE();
1815}
1816
1817
1818/**
1819 * @opcode 0x12
1820 * @oppfx 0xf3
1821 * @opcpuid sse3
1822 * @opgroup og_sse3_pcksclr_datamove
1823 * @opxcpttype 4
1824 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1825 * op1=0x00000002000000020000000100000001
1826 */
1827FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1828{
1829 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1831 if (IEM_IS_MODRM_REG_MODE(bRm))
1832 {
1833 /*
1834 * Register, register.
1835 */
1836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1837 IEM_MC_BEGIN(2, 0);
1838 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1839 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1840
1841 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1842 IEM_MC_PREPARE_SSE_USAGE();
1843
1844 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1845 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1846 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1847
1848 IEM_MC_ADVANCE_RIP();
1849 IEM_MC_END();
1850 }
1851 else
1852 {
1853 /*
1854 * Register, memory.
1855 */
1856 IEM_MC_BEGIN(2, 2);
1857 IEM_MC_LOCAL(RTUINT128U, uSrc);
1858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1859 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1860 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1865 IEM_MC_PREPARE_SSE_USAGE();
1866
1867 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1869 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1870
1871 IEM_MC_ADVANCE_RIP();
1872 IEM_MC_END();
1873 }
1874 return VINF_SUCCESS;
1875}
1876
1877
1878/**
1879 * @opcode 0x12
1880 * @oppfx 0xf2
1881 * @opcpuid sse3
1882 * @opgroup og_sse3_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1885 * op1=0x22222222111111112222222211111111
1886 */
1887FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1888{
1889 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1891 if (IEM_IS_MODRM_REG_MODE(bRm))
1892 {
1893 /*
1894 * Register, register.
1895 */
1896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1897 IEM_MC_BEGIN(2, 0);
1898 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1899 IEM_MC_ARG(uint64_t, uSrc, 1);
1900
1901 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1902 IEM_MC_PREPARE_SSE_USAGE();
1903
1904 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1905 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1906 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1907
1908 IEM_MC_ADVANCE_RIP();
1909 IEM_MC_END();
1910 }
1911 else
1912 {
1913 /*
1914 * Register, memory.
1915 */
1916 IEM_MC_BEGIN(2, 2);
1917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1918 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1919 IEM_MC_ARG(uint64_t, uSrc, 1);
1920
1921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1924 IEM_MC_PREPARE_SSE_USAGE();
1925
1926 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1927 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1928 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1929
1930 IEM_MC_ADVANCE_RIP();
1931 IEM_MC_END();
1932 }
1933 return VINF_SUCCESS;
1934}
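
/*
 * iemAImpl_movddup amounts to broadcasting the low quadword (sketch only,
 * assuming the RTUINT128U au64[] view):
 *
 *      puDst->au64[0] = uSrc;
 *      puDst->au64[1] = uSrc;
 *
 * which is why the memory path above fetches only 64 bits even though the
 * destination is a full 128-bit register.
 */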
1935
1936
1937/**
1938 * @opcode 0x13
1939 * @opcodesub !11 mr/reg
1940 * @oppfx none
1941 * @opcpuid sse
1942 * @opgroup og_sse_simdfp_datamove
1943 * @opxcpttype 5
1944 * @optest op1=1 op2=2 -> op1=2
1945 * @optest op1=0 op2=-42 -> op1=-42
1946 */
1947FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1948{
1949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1950 if (IEM_IS_MODRM_MEM_MODE(bRm))
1951 {
1952 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1953
1954 IEM_MC_BEGIN(0, 2);
1955 IEM_MC_LOCAL(uint64_t, uSrc);
1956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1957
1958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1960 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1961 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1962
1963 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1964 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1965
1966 IEM_MC_ADVANCE_RIP();
1967 IEM_MC_END();
1968 return VINF_SUCCESS;
1969 }
1970
1971 /**
1972 * @opdone
1973 * @opmnemonic ud0f13m3
1974 * @opcode 0x13
1975 * @opcodesub 11 mr/reg
1976 * @oppfx none
1977 * @opunused immediate
1978 * @opcpuid sse
1979 * @optest ->
1980 */
1981 return IEMOP_RAISE_INVALID_OPCODE();
1982}
1983
1984
1985/**
1986 * @opcode 0x13
1987 * @opcodesub !11 mr/reg
1988 * @oppfx 0x66
1989 * @opcpuid sse2
1990 * @opgroup og_sse2_pcksclr_datamove
1991 * @opxcpttype 5
1992 * @optest op1=1 op2=2 -> op1=2
1993 * @optest op1=0 op2=-42 -> op1=-42
1994 */
1995FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1996{
1997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1998 if (IEM_IS_MODRM_MEM_MODE(bRm))
1999 {
2000 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2001 IEM_MC_BEGIN(0, 2);
2002 IEM_MC_LOCAL(uint64_t, uSrc);
2003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004
2005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2008 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2009
2010 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2011 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2012
2013 IEM_MC_ADVANCE_RIP();
2014 IEM_MC_END();
2015 return VINF_SUCCESS;
2016 }
2017
2018 /**
2019 * @opdone
2020 * @opmnemonic ud660f13m3
2021 * @opcode 0x13
2022 * @opcodesub 11 mr/reg
2023 * @oppfx 0x66
2024 * @opunused immediate
2025 * @opcpuid sse
2026 * @optest ->
2027 */
2028 return IEMOP_RAISE_INVALID_OPCODE();
2029}
2030
2031
2032/**
2033 * @opmnemonic udf30f13
2034 * @opcode 0x13
2035 * @oppfx 0xf3
2036 * @opunused intel-modrm
2037 * @opcpuid sse
2038 * @optest ->
2039 * @opdone
2040 */
2041
2042/**
2043 * @opmnemonic udf20f13
2044 * @opcode 0x13
2045 * @oppfx 0xf2
2046 * @opunused intel-modrm
2047 * @opcpuid sse
2048 * @optest ->
2049 * @opdone
2050 */
2051
2052/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2053FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
2054/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2055FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
2056
2057/**
2058 * @opdone
2059 * @opmnemonic udf30f14
2060 * @opcode 0x14
2061 * @oppfx 0xf3
2062 * @opunused intel-modrm
2063 * @opcpuid sse
2064 * @optest ->
2065 * @opdone
2066 */
2067
2068/**
2069 * @opmnemonic udf20f14
2070 * @opcode 0x14
2071 * @oppfx 0xf2
2072 * @opunused intel-modrm
2073 * @opcpuid sse
2074 * @optest ->
2075 * @opdone
2076 */
2077
2078/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2079FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
2080/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2081FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
2082/* Opcode 0xf3 0x0f 0x15 - invalid */
2083/* Opcode 0xf2 0x0f 0x15 - invalid */
2084
2085/**
2086 * @opdone
2087 * @opmnemonic udf30f15
2088 * @opcode 0x15
2089 * @oppfx 0xf3
2090 * @opunused intel-modrm
2091 * @opcpuid sse
2092 * @optest ->
2093 * @opdone
2094 */
2095
2096/**
2097 * @opmnemonic udf20f15
2098 * @opcode 0x15
2099 * @oppfx 0xf2
2100 * @opunused intel-modrm
2101 * @opcpuid sse
2102 * @optest ->
2103 * @opdone
2104 */
2105
2106FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2107{
2108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2109 if (IEM_IS_MODRM_REG_MODE(bRm))
2110 {
2111 /**
2112 * @opcode 0x16
2113 * @opcodesub 11 mr/reg
2114 * @oppfx none
2115 * @opcpuid sse
2116 * @opgroup og_sse_simdfp_datamove
2117 * @opxcpttype 5
2118 * @optest op1=1 op2=2 -> op1=2
2119 * @optest op1=0 op2=-42 -> op1=-42
2120 */
2121 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2122
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2124 IEM_MC_BEGIN(0, 1);
2125 IEM_MC_LOCAL(uint64_t, uSrc);
2126
2127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2128 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2129 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2130 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2131
2132 IEM_MC_ADVANCE_RIP();
2133 IEM_MC_END();
2134 }
2135 else
2136 {
2137 /**
2138 * @opdone
2139 * @opcode 0x16
2140 * @opcodesub !11 mr/reg
2141 * @oppfx none
2142 * @opcpuid sse
2143 * @opgroup og_sse_simdfp_datamove
2144 * @opxcpttype 5
2145 * @optest op1=1 op2=2 -> op1=2
2146 * @optest op1=0 op2=-42 -> op1=-42
2147 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2148 */
2149 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2150
2151 IEM_MC_BEGIN(0, 2);
2152 IEM_MC_LOCAL(uint64_t, uSrc);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154
2155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2157 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2158 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2159
2160 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2161 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2162
2163 IEM_MC_ADVANCE_RIP();
2164 IEM_MC_END();
2165 }
2166 return VINF_SUCCESS;
2167}
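
/*
 * The two encodings above share the 0x16 opcode; only the mod field picks the
 * variant. Sketch of the effect (illustration only):
 *
 *      // mod == 11: movlhps xmm1, xmm2
 *      puDst->au64[1] = puSrc->au64[0];            // src low -> dst high
 *      // mod != 11: movhps xmm1, m64
 *      puDst->au64[1] = *(uint64_t const *)pvMem;  // m64 -> dst high
 *
 * Either way the low quadword of the destination is left untouched.
 */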
2168
2169
2170/**
2171 * @opcode 0x16
2172 * @opcodesub !11 mr/reg
2173 * @oppfx 0x66
2174 * @opcpuid sse2
2175 * @opgroup og_sse2_pcksclr_datamove
2176 * @opxcpttype 5
2177 * @optest op1=1 op2=2 -> op1=2
2178 * @optest op1=0 op2=-42 -> op1=-42
2179 */
2180FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2181{
2182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2183 if (IEM_IS_MODRM_MEM_MODE(bRm))
2184 {
2185 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2186 IEM_MC_BEGIN(0, 2);
2187 IEM_MC_LOCAL(uint64_t, uSrc);
2188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2189
2190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2193 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2194
2195 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2196 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2197
2198 IEM_MC_ADVANCE_RIP();
2199 IEM_MC_END();
2200 return VINF_SUCCESS;
2201 }
2202
2203 /**
2204 * @opdone
2205 * @opmnemonic ud660f16m3
2206 * @opcode 0x16
2207 * @opcodesub 11 mr/reg
2208 * @oppfx 0x66
2209 * @opunused immediate
2210 * @opcpuid sse
2211 * @optest ->
2212 */
2213 return IEMOP_RAISE_INVALID_OPCODE();
2214}
2215
2216
2217/**
2218 * @opcode 0x16
2219 * @oppfx 0xf3
2220 * @opcpuid sse3
2221 * @opgroup og_sse3_pcksclr_datamove
2222 * @opxcpttype 4
2223 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2224 * op1=0x00000002000000020000000100000001
2225 */
2226FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2227{
2228 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2230 if (IEM_IS_MODRM_REG_MODE(bRm))
2231 {
2232 /*
2233 * Register, register.
2234 */
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_BEGIN(2, 0);
2237 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2238 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2239
2240 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2241 IEM_MC_PREPARE_SSE_USAGE();
2242
2243 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2244 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2245 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2246
2247 IEM_MC_ADVANCE_RIP();
2248 IEM_MC_END();
2249 }
2250 else
2251 {
2252 /*
2253 * Register, memory.
2254 */
2255 IEM_MC_BEGIN(2, 2);
2256 IEM_MC_LOCAL(RTUINT128U, uSrc);
2257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2258 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2259 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2260
2261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2264 IEM_MC_PREPARE_SSE_USAGE();
2265
2266 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2267 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2268 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2269
2270 IEM_MC_ADVANCE_RIP();
2271 IEM_MC_END();
2272 }
2273 return VINF_SUCCESS;
2274}
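
/*
 * iemAImpl_movshdup is the odd-dword counterpart of movsldup earlier in this
 * file (sketch only, assuming the RTUINT128U au32[] view):
 *
 *      uint32_t const uLo = puSrc->au32[1], uHi = puSrc->au32[3];
 *      puDst->au32[0] = puDst->au32[1] = uLo;
 *      puDst->au32[2] = puDst->au32[3] = uHi;
 */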
2275
2276/**
2277 * @opdone
2278 * @opmnemonic udf20f16
2279 * @opcode 0x16
2280 * @oppfx 0xf2
2281 * @opunused intel-modrm
2282 * @opcpuid sse
2283 * @optest ->
2284 * @opdone
2285 */
2286
2287
2288/**
2289 * @opcode 0x17
2290 * @opcodesub !11 mr/reg
2291 * @oppfx none
2292 * @opcpuid sse
2293 * @opgroup og_sse_simdfp_datamove
2294 * @opxcpttype 5
2295 * @optest op1=1 op2=2 -> op1=2
2296 * @optest op1=0 op2=-42 -> op1=-42
2297 */
2298FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2299{
2300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2301 if (IEM_IS_MODRM_MEM_MODE(bRm))
2302 {
2303 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2304
2305 IEM_MC_BEGIN(0, 2);
2306 IEM_MC_LOCAL(uint64_t, uSrc);
2307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2308
2309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2311 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2312 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2313
2314 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2315 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2316
2317 IEM_MC_ADVANCE_RIP();
2318 IEM_MC_END();
2319 return VINF_SUCCESS;
2320 }
2321
2322 /**
2323 * @opdone
2324 * @opmnemonic ud0f17m3
2325 * @opcode 0x17
2326 * @opcodesub 11 mr/reg
2327 * @oppfx none
2328 * @opunused immediate
2329 * @opcpuid sse
2330 * @optest ->
2331 */
2332 return IEMOP_RAISE_INVALID_OPCODE();
2333}
2334
2335
2336/**
2337 * @opcode 0x17
2338 * @opcodesub !11 mr/reg
2339 * @oppfx 0x66
2340 * @opcpuid sse2
2341 * @opgroup og_sse2_pcksclr_datamove
2342 * @opxcpttype 5
2343 * @optest op1=1 op2=2 -> op1=2
2344 * @optest op1=0 op2=-42 -> op1=-42
2345 */
2346FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2347{
2348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2349 if (IEM_IS_MODRM_MEM_MODE(bRm))
2350 {
2351 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2352
2353 IEM_MC_BEGIN(0, 2);
2354 IEM_MC_LOCAL(uint64_t, uSrc);
2355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2356
2357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2361
2362 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2363 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2364
2365 IEM_MC_ADVANCE_RIP();
2366 IEM_MC_END();
2367 return VINF_SUCCESS;
2368 }
2369
2370 /**
2371 * @opdone
2372 * @opmnemonic ud660f17m3
2373 * @opcode 0x17
2374 * @opcodesub 11 mr/reg
2375 * @oppfx 0x66
2376 * @opunused immediate
2377 * @opcpuid sse
2378 * @optest ->
2379 */
2380 return IEMOP_RAISE_INVALID_OPCODE();
2381}
2382
2383
2384/**
2385 * @opdone
2386 * @opmnemonic udf30f17
2387 * @opcode 0x17
2388 * @oppfx 0xf3
2389 * @opunused intel-modrm
2390 * @opcpuid sse
2391 * @optest ->
2392 * @opdone
2393 */
2394
2395/**
2396 * @opmnemonic udf20f17
2397 * @opcode 0x17
2398 * @oppfx 0xf2
2399 * @opunused intel-modrm
2400 * @opcpuid sse
2401 * @optest ->
2402 * @opdone
2403 */
2404
2405
2406/** Opcode 0x0f 0x18. */
2407FNIEMOP_DEF(iemOp_prefetch_Grp16)
2408{
2409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2410 if (IEM_IS_MODRM_MEM_MODE(bRm))
2411 {
2412 switch (IEM_GET_MODRM_REG_8(bRm))
2413 {
2414 case 4: /* Aliased to /0 for the time being according to AMD. */
2415 case 5: /* Aliased to /0 for the time being according to AMD. */
2416 case 6: /* Aliased to /0 for the time being according to AMD. */
2417 case 7: /* Aliased to /0 for the time being according to AMD. */
2418 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2419 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2420 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2421 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2423 }
2424
2425 IEM_MC_BEGIN(0, 1);
2426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 /* Currently a NOP. */
2430 NOREF(GCPtrEffSrc);
2431 IEM_MC_ADVANCE_RIP();
2432 IEM_MC_END();
2433 return VINF_SUCCESS;
2434 }
2435
2436 return IEMOP_RAISE_INVALID_OPCODE();
2437}
2438
2439
2440/** Opcode 0x0f 0x19..0x1f. */
2441FNIEMOP_DEF(iemOp_nop_Ev)
2442{
2443 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2445 if (IEM_IS_MODRM_REG_MODE(bRm))
2446 {
2447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2448 IEM_MC_BEGIN(0, 0);
2449 IEM_MC_ADVANCE_RIP();
2450 IEM_MC_END();
2451 }
2452 else
2453 {
2454 IEM_MC_BEGIN(0, 1);
2455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2458 /* Currently a NOP. */
2459 NOREF(GCPtrEffSrc);
2460 IEM_MC_ADVANCE_RIP();
2461 IEM_MC_END();
2462 }
2463 return VINF_SUCCESS;
2464}
2465
2466
2467/** Opcode 0x0f 0x20. */
2468FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2469{
2470 /* mod is ignored, as are operand size overrides. */
2471 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2472 IEMOP_HLP_MIN_386();
2473 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2474 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2475 else
2476 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2477
2478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2479 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2480 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2481 {
2482 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2483 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2484 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2485 iCrReg |= 8;
2486 }
2487 switch (iCrReg)
2488 {
2489 case 0: case 2: case 3: case 4: case 8:
2490 break;
2491 default:
2492 return IEMOP_RAISE_INVALID_OPCODE();
2493 }
2494 IEMOP_HLP_DONE_DECODING();
2495
2496 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
2497}
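
/*
 * Worked example of the LOCK/CR8 special case above: AMD documents the LOCK
 * prefix as an alternative way of encoding CR8 accesses for 32-bit code on
 * CPUs with the AltMovCr8 feature (the fMovCr8In32Bit check), e.g.:
 *
 *      f0 0f 20 c0     lock mov eax, cr0   ; decodes as mov eax, cr8
 *
 * Without the feature the LOCK prefix raises #UD here, ahead of any #GP.
 */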
2498
2499
2500/** Opcode 0x0f 0x21. */
2501FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2502{
2503 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2504 IEMOP_HLP_MIN_386();
2505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2507 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2508 return IEMOP_RAISE_INVALID_OPCODE();
2509 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2510 IEM_GET_MODRM_RM(pVCpu, bRm),
2511 IEM_GET_MODRM_REG_8(bRm));
2512}
2513
2514
2515/** Opcode 0x0f 0x22. */
2516FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2517{
2518 /* mod is ignored, as are operand size overrides. */
2519 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2520 IEMOP_HLP_MIN_386();
2521 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2522 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2523 else
2524 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2525
2526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2527 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2528 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2529 {
2530 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2531 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2532 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2533 iCrReg |= 8;
2534 }
2535 switch (iCrReg)
2536 {
2537 case 0: case 2: case 3: case 4: case 8:
2538 break;
2539 default:
2540 return IEMOP_RAISE_INVALID_OPCODE();
2541 }
2542 IEMOP_HLP_DONE_DECODING();
2543
2544 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
2545}
2546
2547
2548/** Opcode 0x0f 0x23. */
2549FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2550{
2551 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2552 IEMOP_HLP_MIN_386();
2553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2556 return IEMOP_RAISE_INVALID_OPCODE();
2557 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2558 IEM_GET_MODRM_REG_8(bRm),
2559 IEM_GET_MODRM_RM(pVCpu, bRm));
2560}
2561
2562
2563/** Opcode 0x0f 0x24. */
2564FNIEMOP_DEF(iemOp_mov_Rd_Td)
2565{
2566 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2567 IEMOP_HLP_MIN_386();
2568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2570 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2571 return IEMOP_RAISE_INVALID_OPCODE();
2572 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2573 IEM_GET_MODRM_RM(pVCpu, bRm),
2574 IEM_GET_MODRM_REG_8(bRm));
2575}
2576
2577
2578/** Opcode 0x0f 0x26. */
2579FNIEMOP_DEF(iemOp_mov_Td_Rd)
2580{
2581 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2582 IEMOP_HLP_MIN_386();
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2585 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2586 return IEMOP_RAISE_INVALID_OPCODE();
2587 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2588 IEM_GET_MODRM_REG_8(bRm),
2589 IEM_GET_MODRM_RM(pVCpu, bRm));
2590}
2591
2592
2593/**
2594 * @opcode 0x28
2595 * @oppfx none
2596 * @opcpuid sse
2597 * @opgroup og_sse_simdfp_datamove
2598 * @opxcpttype 1
2599 * @optest op1=1 op2=2 -> op1=2
2600 * @optest op1=0 op2=-42 -> op1=-42
2601 */
2602FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2603{
2604 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2606 if (IEM_IS_MODRM_REG_MODE(bRm))
2607 {
2608 /*
2609 * Register, register.
2610 */
2611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2612 IEM_MC_BEGIN(0, 0);
2613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2615 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2616 IEM_GET_MODRM_RM(pVCpu, bRm));
2617 IEM_MC_ADVANCE_RIP();
2618 IEM_MC_END();
2619 }
2620 else
2621 {
2622 /*
2623 * Register, memory.
2624 */
2625 IEM_MC_BEGIN(0, 2);
2626 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2628
2629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2631 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2632 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2633
2634 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2635 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2636
2637 IEM_MC_ADVANCE_RIP();
2638 IEM_MC_END();
2639 }
2640 return VINF_SUCCESS;
2641}
2642
2643/**
2644 * @opcode 0x28
2645 * @oppfx 66
2646 * @opcpuid sse2
2647 * @opgroup og_sse2_pcksclr_datamove
2648 * @opxcpttype 1
2649 * @optest op1=1 op2=2 -> op1=2
2650 * @optest op1=0 op2=-42 -> op1=-42
2651 */
2652FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2653{
2654 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2656 if (IEM_IS_MODRM_REG_MODE(bRm))
2657 {
2658 /*
2659 * Register, register.
2660 */
2661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2662 IEM_MC_BEGIN(0, 0);
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2665 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2666 IEM_GET_MODRM_RM(pVCpu, bRm));
2667 IEM_MC_ADVANCE_RIP();
2668 IEM_MC_END();
2669 }
2670 else
2671 {
2672 /*
2673 * Register, memory.
2674 */
2675 IEM_MC_BEGIN(0, 2);
2676 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2678
2679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2681 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683
2684 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2685 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2686
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 return VINF_SUCCESS;
2691}
2692
2693/* Opcode 0xf3 0x0f 0x28 - invalid */
2694/* Opcode 0xf2 0x0f 0x28 - invalid */
2695
2696/**
2697 * @opcode 0x29
2698 * @oppfx none
2699 * @opcpuid sse
2700 * @opgroup og_sse_simdfp_datamove
2701 * @opxcpttype 1
2702 * @optest op1=1 op2=2 -> op1=2
2703 * @optest op1=0 op2=-42 -> op1=-42
2704 */
2705FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2706{
2707 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2709 if (IEM_IS_MODRM_REG_MODE(bRm))
2710 {
2711 /*
2712 * Register, register.
2713 */
2714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2715 IEM_MC_BEGIN(0, 0);
2716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2718 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2719 IEM_GET_MODRM_REG(pVCpu, bRm));
2720 IEM_MC_ADVANCE_RIP();
2721 IEM_MC_END();
2722 }
2723 else
2724 {
2725 /*
2726 * Memory, register.
2727 */
2728 IEM_MC_BEGIN(0, 2);
2729 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2731
2732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2734 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2735 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2736
2737 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2738 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2739
2740 IEM_MC_ADVANCE_RIP();
2741 IEM_MC_END();
2742 }
2743 return VINF_SUCCESS;
2744}
2745
2746/**
2747 * @opcode 0x29
2748 * @oppfx 66
2749 * @opcpuid sse2
2750 * @opgroup og_sse2_pcksclr_datamove
2751 * @opxcpttype 1
2752 * @optest op1=1 op2=2 -> op1=2
2753 * @optest op1=0 op2=-42 -> op1=-42
2754 */
2755FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2756{
2757 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2759 if (IEM_IS_MODRM_REG_MODE(bRm))
2760 {
2761 /*
2762 * Register, register.
2763 */
2764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2765 IEM_MC_BEGIN(0, 0);
2766 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2767 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2768 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2769 IEM_GET_MODRM_REG(pVCpu, bRm));
2770 IEM_MC_ADVANCE_RIP();
2771 IEM_MC_END();
2772 }
2773 else
2774 {
2775 /*
2776 * Memory, register.
2777 */
2778 IEM_MC_BEGIN(0, 2);
2779 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2781
2782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2784 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2785 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2786
2787 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2788 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2789
2790 IEM_MC_ADVANCE_RIP();
2791 IEM_MC_END();
2792 }
2793 return VINF_SUCCESS;
2794}
2795
2796/* Opcode 0xf3 0x0f 0x29 - invalid */
2797/* Opcode 0xf2 0x0f 0x29 - invalid */
2798
2799
2800/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2801FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2802/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2803FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2804/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2805FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2806/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2807FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2808
2809
2810/**
2811 * @opcode 0x2b
2812 * @opcodesub !11 mr/reg
2813 * @oppfx none
2814 * @opcpuid sse
2815 * @opgroup og_sse1_cachect
2816 * @opxcpttype 1
2817 * @optest op1=1 op2=2 -> op1=2
2818 * @optest op1=0 op2=-42 -> op1=-42
2819 */
2820FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2821{
2822 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2824 if (IEM_IS_MODRM_MEM_MODE(bRm))
2825 {
2826 /*
2827 * memory, register.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2837
2838 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2839 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2840
2841 IEM_MC_ADVANCE_RIP();
2842 IEM_MC_END();
2843 }
2844 /* The register, register encoding is invalid. */
2845 else
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 return VINF_SUCCESS;
2848}
2849
2850/**
2851 * @opcode 0x2b
2852 * @opcodesub !11 mr/reg
2853 * @oppfx 0x66
2854 * @opcpuid sse2
2855 * @opgroup og_sse2_cachect
2856 * @opxcpttype 1
2857 * @optest op1=1 op2=2 -> op1=2
2858 * @optest op1=0 op2=-42 -> op1=-42
2859 */
2860FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2861{
2862 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2864 if (IEM_IS_MODRM_MEM_MODE(bRm))
2865 {
2866 /*
2867 * memory, register.
2868 */
2869 IEM_MC_BEGIN(0, 2);
2870 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2872
2873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2876 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2877
2878 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2879 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2880
2881 IEM_MC_ADVANCE_RIP();
2882 IEM_MC_END();
2883 }
2884 /* The register, register encoding is invalid. */
2885 else
2886 return IEMOP_RAISE_INVALID_OPCODE();
2887 return VINF_SUCCESS;
2888}
2889/* Opcode 0xf3 0x0f 0x2b - invalid */
2890/* Opcode 0xf2 0x0f 0x2b - invalid */
2891
2892
2893/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2894FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2895/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2896FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2897/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2898FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2899/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2900FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2901
2902/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2903FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2904/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2905FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2906/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2907FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2908/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2909FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2910
2911/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2912FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2913/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2914FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2915/* Opcode 0xf3 0x0f 0x2e - invalid */
2916/* Opcode 0xf2 0x0f 0x2e - invalid */
2917
2918/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2919FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2920/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2921FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2922/* Opcode 0xf3 0x0f 0x2f - invalid */
2923/* Opcode 0xf2 0x0f 0x2f - invalid */
2924
2925/** Opcode 0x0f 0x30. */
2926FNIEMOP_DEF(iemOp_wrmsr)
2927{
2928 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2930 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2931}
2932
2933
2934/** Opcode 0x0f 0x31. */
2935FNIEMOP_DEF(iemOp_rdtsc)
2936{
2937 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2939 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2940}
2941
2942
2943/** Opcode 0x0f 0x32. */
2944FNIEMOP_DEF(iemOp_rdmsr)
2945{
2946 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2949}
2950
2951
2952/** Opcode 0x0f 0x33. */
2953FNIEMOP_DEF(iemOp_rdpmc)
2954{
2955 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2957 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2958}
2959
2960
2961/** Opcode 0x0f 0x34. */
2962FNIEMOP_DEF(iemOp_sysenter)
2963{
2964 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2966 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
2967}
2968
2969/** Opcode 0x0f 0x35. */
2970FNIEMOP_DEF(iemOp_sysexit)
2971{
2972 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2974 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
2975}
2976
2977/** Opcode 0x0f 0x37. */
2978FNIEMOP_STUB(iemOp_getsec);
2979
2980
2981/** Opcode 0x0f 0x38. */
2982FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2983{
2984#ifdef IEM_WITH_THREE_0F_38
2985 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2986 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2987#else
2988 IEMOP_BITCH_ABOUT_STUB();
2989 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2990#endif
2991}
2992
2993
2994/** Opcode 0x0f 0x3a. */
2995FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2996{
2997#ifdef IEM_WITH_THREE_0F_3A
2998 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2999 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3000#else
3001 IEMOP_BITCH_ABOUT_STUB();
3002 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3003#endif
3004}
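
/*
 * The lookups above pack four handlers per opcode byte, one per mandatory
 * prefix, selected by idxPrefix. Sketch of the assumed layout (0=none,
 * 1=0x66, 2=0xf3, 3=0xf2, i.e. the same ordering as the VEX pp field):
 *
 *      g_apfnThreeByte0f38[b * 4 + 0]      0f 38 <b>       (no prefix)
 *      g_apfnThreeByte0f38[b * 4 + 1]      66 0f 38 <b>
 *      g_apfnThreeByte0f38[b * 4 + 2]      f3 0f 38 <b>
 *      g_apfnThreeByte0f38[b * 4 + 3]      f2 0f 38 <b>
 */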
3005
3006
3007/**
3008 * Implements a conditional move.
3009 *
3010 * Wish there were an obvious way to do this that would let us share code
3011 * and reduce bloat.
3012 *
3013 * @param a_Cnd The conditional "microcode" operation.
3014 */
3015#define CMOV_X(a_Cnd) \
3016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3017 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3018 { \
3019 switch (pVCpu->iem.s.enmEffOpSize) \
3020 { \
3021 case IEMMODE_16BIT: \
3022 IEM_MC_BEGIN(0, 1); \
3023 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3024 a_Cnd { \
3025 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3026 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3027 } IEM_MC_ENDIF(); \
3028 IEM_MC_ADVANCE_RIP(); \
3029 IEM_MC_END(); \
3030 return VINF_SUCCESS; \
3031 \
3032 case IEMMODE_32BIT: \
3033 IEM_MC_BEGIN(0, 1); \
3034 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3035 a_Cnd { \
3036 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3037 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3038 } IEM_MC_ELSE() { \
3039 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3040 } IEM_MC_ENDIF(); \
3041 IEM_MC_ADVANCE_RIP(); \
3042 IEM_MC_END(); \
3043 return VINF_SUCCESS; \
3044 \
3045 case IEMMODE_64BIT: \
3046 IEM_MC_BEGIN(0, 1); \
3047 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3048 a_Cnd { \
3049 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3050 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3051 } IEM_MC_ENDIF(); \
3052 IEM_MC_ADVANCE_RIP(); \
3053 IEM_MC_END(); \
3054 return VINF_SUCCESS; \
3055 \
3056 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3057 } \
3058 } \
3059 else \
3060 { \
3061 switch (pVCpu->iem.s.enmEffOpSize) \
3062 { \
3063 case IEMMODE_16BIT: \
3064 IEM_MC_BEGIN(0, 2); \
3065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3066 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3068 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3069 a_Cnd { \
3070 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3071 } IEM_MC_ENDIF(); \
3072 IEM_MC_ADVANCE_RIP(); \
3073 IEM_MC_END(); \
3074 return VINF_SUCCESS; \
3075 \
3076 case IEMMODE_32BIT: \
3077 IEM_MC_BEGIN(0, 2); \
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3079 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3081 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3082 a_Cnd { \
3083 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3084 } IEM_MC_ELSE() { \
3085 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3086 } IEM_MC_ENDIF(); \
3087 IEM_MC_ADVANCE_RIP(); \
3088 IEM_MC_END(); \
3089 return VINF_SUCCESS; \
3090 \
3091 case IEMMODE_64BIT: \
3092 IEM_MC_BEGIN(0, 2); \
3093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3094 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3096 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3097 a_Cnd { \
3098 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3099 } IEM_MC_ENDIF(); \
3100 IEM_MC_ADVANCE_RIP(); \
3101 IEM_MC_END(); \
3102 return VINF_SUCCESS; \
3103 \
3104 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3105 } \
3106 } do {} while (0)
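
/*
 * For e.g. cmove eax, ebx the 32-bit register arm of CMOV_X behaves like this
 * sketch (illustration only, names hypothetical):
 *
 *      if (fEFlags & X86_EFL_ZF)
 *          uDst32 = uSrc32;        // condition true: copy
 *      uDst64 &= UINT32_MAX;       // high dword cleared in both arms
 *
 * The IEM_MC_ELSE/IEM_MC_CLEAR_HIGH_GREG_U64 arm is what makes a 32-bit cmov
 * zero-extend the destination in 64-bit mode even when the condition is false.
 */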
3107
3108
3109
3110/** Opcode 0x0f 0x40. */
3111FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3112{
3113 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3114 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3115}
3116
3117
3118/** Opcode 0x0f 0x41. */
3119FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3120{
3121 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3122 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3123}
3124
3125
3126/** Opcode 0x0f 0x42. */
3127FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3128{
3129 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3130 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3131}
3132
3133
3134/** Opcode 0x0f 0x43. */
3135FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3136{
3137 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3138 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3139}
3140
3141
3142/** Opcode 0x0f 0x44. */
3143FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3144{
3145 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3146 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3147}
3148
3149
3150/** Opcode 0x0f 0x45. */
3151FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3152{
3153 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3154 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3155}
3156
3157
3158/** Opcode 0x0f 0x46. */
3159FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3160{
3161 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3162 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3163}
3164
3165
3166/** Opcode 0x0f 0x47. */
3167FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3168{
3169 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3170 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3171}
3172
3173
3174/** Opcode 0x0f 0x48. */
3175FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3176{
3177 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3178 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3179}
3180
3181
3182/** Opcode 0x0f 0x49. */
3183FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3184{
3185 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3186 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3187}
3188
3189
3190/** Opcode 0x0f 0x4a. */
3191FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3192{
3193 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3194 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3195}
3196
3197
3198/** Opcode 0x0f 0x4b. */
3199FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3200{
3201 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3202 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3203}
3204
3205
3206/** Opcode 0x0f 0x4c. */
3207FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3208{
3209 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3210 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3211}
3212
3213
3214/** Opcode 0x0f 0x4d. */
3215FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3216{
3217 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3218 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3219}
3220
3221
3222/** Opcode 0x0f 0x4e. */
3223FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3224{
3225 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3226 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3227}
3228
3229
3230/** Opcode 0x0f 0x4f. */
3231FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3232{
3233 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3234 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3235}
3236
3237#undef CMOV_X
3238
3239/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3240FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3241/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3242FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3243/* Opcode 0xf3 0x0f 0x50 - invalid */
3244/* Opcode 0xf2 0x0f 0x50 - invalid */
3245
3246/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3247FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3248/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3249FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3250/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3251FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3252/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3253FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3254
3255/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3256FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3257/* Opcode 0x66 0x0f 0x52 - invalid */
3258/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3259FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3260/* Opcode 0xf2 0x0f 0x52 - invalid */
3261
3262/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3263FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3264/* Opcode 0x66 0x0f 0x53 - invalid */
3265/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3266FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3267/* Opcode 0xf2 0x0f 0x53 - invalid */
3268
3269
3270/** Opcode 0x0f 0x54 - andps Vps, Wps */
3271FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3272{
3273 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3274 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3275}
3276
3277
3278/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3279FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3280{
3281 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3282 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3283}
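
/*
 * Note that andps/andpd (and the or/xor siblings below) simply reuse the
 * integer pand/pandn/por/pxor workers: a bitwise operation yields the same
 * 128-bit result whether the lanes are viewed as floats, doubles or integers,
 * i.e. roughly:
 *
 *      puDst->au64[0] &= puSrc->au64[0];   // andps == andpd == pand
 *      puDst->au64[1] &= puSrc->au64[1];
 *
 * (Any difference between the encodings on real CPUs is reportedly a matter
 * of execution domain forwarding, which an interpreter need not model.)
 */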
3284
3285
3286/* Opcode 0xf3 0x0f 0x54 - invalid */
3287/* Opcode 0xf2 0x0f 0x54 - invalid */
3288
3289
3290/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3291FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3292{
3293 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3294 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3295}
3296
3297
3298/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3299FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3300{
3301 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3302 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3303}
3304
3305
3306/* Opcode 0xf3 0x0f 0x55 - invalid */
3307/* Opcode 0xf2 0x0f 0x55 - invalid */
3308
3309
3310/** Opcode 0x0f 0x56 - orps Vps, Wps */
3311FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3312{
3313 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3314 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3315}
3316
3317
3318/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3319FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3320{
3321 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3322 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3323}
3324
3325
3326/* Opcode 0xf3 0x0f 0x56 - invalid */
3327/* Opcode 0xf2 0x0f 0x56 - invalid */
3328
3329
3330/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3331FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3332{
3333 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3334 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3335}
3336
3337
3338/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3339FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3340{
3341 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3342 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3343}
3344
3345
3346/* Opcode 0xf3 0x0f 0x57 - invalid */
3347/* Opcode 0xf2 0x0f 0x57 - invalid */
3348
3349/** Opcode 0x0f 0x58 - addps Vps, Wps */
3350FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3351/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3352FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3353/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3354FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3355/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3356FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3357
3358/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3359FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3360/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3361FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3362/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3363FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3364/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3365FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3366
3367/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3368FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3369/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3370FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3371/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3372FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3373/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3374FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3375
3376/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3377FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3378/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3379FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3380/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3381FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3382/* Opcode 0xf2 0x0f 0x5b - invalid */
3383
3384/** Opcode 0x0f 0x5c - subps Vps, Wps */
3385FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3386/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3387FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3388/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3389FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3390/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3391FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3392
3393/** Opcode 0x0f 0x5d - minps Vps, Wps */
3394FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3395/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3396FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3397/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3398FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3399/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3400FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3401
3402/** Opcode 0x0f 0x5e - divps Vps, Wps */
3403FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3404/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3405FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3406/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3407FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3408/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3409FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3410
3411/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3412FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3413/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3414FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3415/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3416FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3417/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3418FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3419
3420/**
3421 * Common worker for MMX instructions on the forms:
3422 * pxxxx mm1, mm2/mem32
3423 *
3424 * The 2nd operand is the first half of a register, which in the memory case
3425 * means a 32-bit memory access.
3426 */
3427FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3428{
3429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3430 if (IEM_IS_MODRM_REG_MODE(bRm))
3431 {
3432 /*
3433 * Register, register.
3434 */
3435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3436 IEM_MC_BEGIN(2, 0);
3437 IEM_MC_ARG(uint64_t *, puDst, 0);
3438 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3439 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3440 IEM_MC_PREPARE_FPU_USAGE();
3441 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3442 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3443 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3444 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3445 IEM_MC_FPU_TO_MMX_MODE();
3446 IEM_MC_ADVANCE_RIP();
3447 IEM_MC_END();
3448 }
3449 else
3450 {
3451 /*
3452 * Register, memory.
3453 */
3454 IEM_MC_BEGIN(2, 2);
3455 IEM_MC_ARG(uint64_t *, puDst, 0);
3456 IEM_MC_LOCAL(uint64_t, uSrc);
3457 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3462 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3463 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3464
3465 IEM_MC_PREPARE_FPU_USAGE();
3466 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3467 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3468 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3469 IEM_MC_FPU_TO_MMX_MODE();
3470
3471 IEM_MC_ADVANCE_RIP();
3472 IEM_MC_END();
3473 }
3474 return VINF_SUCCESS;
3475}
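
/*
 * "LowLow To Full" describes interleaving the low halves of both operands,
 * e.g. for punpcklbw (sketch only, assuming the RTUINT64U au8[] view):
 *
 *      RTUINT64U const uA = { *puDst }, uB = { *puSrc };
 *      RTUINT64U uResult;
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          uResult.au8[2 * i]     = uA.au8[i];     // byte from destination
 *          uResult.au8[2 * i + 1] = uB.au8[i];     // byte from source
 *      }
 *      *puDst = uResult.u;
 *
 * Only the low 4 source bytes are consumed, hence the zero-extending 32-bit
 * fetch in the memory path above.
 */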
3476
3477
3478/**
3479 * Common worker for SSE2 instructions on the forms:
3480 * pxxxx xmm1, xmm2/mem128
3481 *
3482 * The 2nd operand is the first half of a register, which in the memory case
3483 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
3484 *
3485 * Exceptions type 4.
3486 */
3487FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3488{
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 if (IEM_IS_MODRM_REG_MODE(bRm))
3491 {
3492 /*
3493 * Register, register.
3494 */
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_BEGIN(2, 0);
3497 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3498 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3499 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3501 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3502 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3503 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3504 IEM_MC_ADVANCE_RIP();
3505 IEM_MC_END();
3506 }
3507 else
3508 {
3509 /*
3510 * Register, memory.
3511 */
3512 IEM_MC_BEGIN(2, 2);
3513 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3514 IEM_MC_LOCAL(RTUINT128U, uSrc);
3515 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3517
3518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3521 /** @todo Most CPUs probably only read the low qword. We read everything to
3522 * make sure we apply segmentation and alignment checks correctly.
3523 * When we have time, it would be interesting to explore what real
3524 * CPUs actually does and whether it will do a TLB load for the high
3525 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
3526 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3527
3528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3529 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3530 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3531
3532 IEM_MC_ADVANCE_RIP();
3533 IEM_MC_END();
3534 }
3535 return VINF_SUCCESS;
3536}
3537
3538
3539/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3540FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3541{
3542 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3543 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
3544}
3545
3546
3547/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
3548FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3549{
3550 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3551 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
3552}
3553
3554
3555/* Opcode 0xf3 0x0f 0x60 - invalid */
3556
3557
3558/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3559FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3560{
3561 /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
3562 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3563 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
3564}
3565
3566
3567/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3568FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3569{
3570 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3571 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
3572}
3573
3574
3575/* Opcode 0xf3 0x0f 0x61 - invalid */
3576
3577
3578/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3579FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3580{
3581 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3582 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
3583}
3584
3585
3586/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3587FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3588{
3589 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3590 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
3591}
3592
3593
3594/* Opcode 0xf3 0x0f 0x62 - invalid */
3595
3596
3597
3598/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3599FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
3600{
3601 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3602 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
3603}
3604
3605
3606/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3607FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
3608{
3609 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3610 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
3611}
3612
3613
3614/* Opcode 0xf3 0x0f 0x63 - invalid */
3615
3616
3617/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3618FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
3619{
3620 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3621 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
3622}
3623
3624
3625/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3626FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
3627{
3628 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3629 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
3630}
3631
3632
3633/* Opcode 0xf3 0x0f 0x64 - invalid */
3634
3635
3636/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3637FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
3638{
3639 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3640 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
3641}
3642
3643
3644/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3645FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
3646{
3647 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3648 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
3649}
3650
3651
3652/* Opcode 0xf3 0x0f 0x65 - invalid */
3653
3654
3655/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3656FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
3657{
3658 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3659 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
3660}
3661
3662
3663/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3664FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
3665{
3666 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3667 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
3668}
3669
3670
3671/* Opcode 0xf3 0x0f 0x66 - invalid */
3672
3673
3674/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3675FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
3676{
3677 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3678 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
3679}
3680
3681
3682/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3683FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
3684{
3685 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3686 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
3687}
3688
3689
3690/* Opcode 0xf3 0x0f 0x67 - invalid */
3691
3692
3693/**
3694 * Common worker for MMX instructions on the form:
3695 * pxxxx mm1, mm2/mem64
3696 *
3697 * The 2nd operand is the second half of a register, which in the memory case
3698 * means a 64-bit memory access for MMX.
3699 */
3700FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3701{
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if (IEM_IS_MODRM_REG_MODE(bRm))
3704 {
3705 /*
3706 * Register, register.
3707 */
3708 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3709 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3711 IEM_MC_BEGIN(2, 0);
3712 IEM_MC_ARG(uint64_t *, puDst, 0);
3713 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3714 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3715 IEM_MC_PREPARE_FPU_USAGE();
3716 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3717 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3718 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3719 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3720 IEM_MC_FPU_TO_MMX_MODE();
3721 IEM_MC_ADVANCE_RIP();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /*
3727 * Register, memory.
3728 */
3729 IEM_MC_BEGIN(2, 2);
3730 IEM_MC_ARG(uint64_t *, puDst, 0);
3731 IEM_MC_LOCAL(uint64_t, uSrc);
3732 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3734
3735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3737 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3738 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
3739
3740 IEM_MC_PREPARE_FPU_USAGE();
3741 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3742 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3743 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3744 IEM_MC_FPU_TO_MMX_MODE();
3745
3746 IEM_MC_ADVANCE_RIP();
3747 IEM_MC_END();
3748 }
3749 return VINF_SUCCESS;
3750}
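
/*
 * The "HighHigh" variant interleaves the upper halves instead, e.g. for
 * punpckhbw (sketch only, same RTUINT64U au8[] view as the LowLow note above):
 *
 *      uResult.au8[2 * i]     = uA.au8[4 + i];     // i = 0..3
 *      uResult.au8[2 * i + 1] = uB.au8[4 + i];
 *
 * which is why the memory form must fetch the full 64 bits above.
 */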
3751
3752
3753/**
3754 * Common worker for SSE2 instructions on the form:
3755 * pxxxx xmm1, xmm2/mem128
3756 *
3757 * The 2nd operand is the second half of a register, which for SSE means a
3758 * 128-bit aligned access that may read the full 128 bits or only the upper 64 bits.
3759 *
3760 * Exceptions type 4.
3761 */
3762FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3763{
3764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3765 if (IEM_IS_MODRM_REG_MODE(bRm))
3766 {
3767 /*
3768 * Register, register.
3769 */
3770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3771 IEM_MC_BEGIN(2, 0);
3772 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3773 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3775 IEM_MC_PREPARE_SSE_USAGE();
3776 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3777 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3778 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3779 IEM_MC_ADVANCE_RIP();
3780 IEM_MC_END();
3781 }
3782 else
3783 {
3784 /*
3785 * Register, memory.
3786 */
3787 IEM_MC_BEGIN(2, 2);
3788 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3789 IEM_MC_LOCAL(RTUINT128U, uSrc);
3790 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3792
3793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3796 /** @todo Most CPUs probably only read the high qword. We read everything to
3797 * make sure we apply segmentation and alignment checks correctly.
3798 * When we have time, it would be interesting to explore what real
3799 * CPUs actually do and whether they do a TLB load for the lower
3800 * part or skip any associated \#PF. */
3801 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3802
3803 IEM_MC_PREPARE_SSE_USAGE();
3804 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3805 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3806
3807 IEM_MC_ADVANCE_RIP();
3808 IEM_MC_END();
3809 }
3810 return VINF_SUCCESS;
3811}
3812
3813
3814/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
3815 * @note Intel and AMD both use Qd for the second parameter, however they
3816 * both list it as a mmX/mem64 operand and Intel describes it as being
3817 * loaded as a qword, so it should be Qq, shouldn't it? */
3818FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
3819{
3820 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3821 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
3822}
3823
3824
3825/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3826FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3827{
3828 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3829 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
3830}
3831
3832
3833/* Opcode 0xf3 0x0f 0x68 - invalid */
3834
3835
3836/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
3837 * @note Intel and AMD both use Qd for the second parameter, however they
3838 * both list it as a mmX/mem64 operand and Intel describes it as being
3839 * loaded as a qword, so it should be Qq, shouldn't it? */
3840FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
3841{
3842 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3843 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
3844}
3845
3846
3847/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3848FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3849{
3850 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3851 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
3853}
3854
3855
3856/* Opcode 0xf3 0x0f 0x69 - invalid */
3857
3858
3859/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
3860 * @note Intel and AMD both use Qd for the second parameter, however they
3861 * both list it as a mmX/mem64 operand and Intel describes it as being
3862 * loaded as a qword, so it should be Qq, shouldn't it? */
3863FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
3864{
3865 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3866 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
3867}
3868
3869
3870/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3871FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
3872{
3873 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3874 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
3875}
3876
3877
3878/* Opcode 0xf3 0x0f 0x6a - invalid */
3879
3880
3881/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3882FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
3883{
3884 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3885 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
3886}
3887
3888
3889/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3890FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
3891{
3892 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3893 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
3894}
3895
3896
3897/* Opcode 0xf3 0x0f 0x6b - invalid */
3898
3899
3900/* Opcode 0x0f 0x6c - invalid */
3901
3902
3903/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3904FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3905{
3906 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3907 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
3908}
3909
3910
3911/* Opcode 0xf3 0x0f 0x6c - invalid */
3912/* Opcode 0xf2 0x0f 0x6c - invalid */
3913
3914
3915/* Opcode 0x0f 0x6d - invalid */
3916
3917
3918/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3919FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
3920{
3921 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3922 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
3923}
3924
3925
3926/* Opcode 0xf3 0x0f 0x6d - invalid */
3927
3928
3929FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3930{
3931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3932 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3933 {
3934 /**
3935 * @opcode 0x6e
3936 * @opcodesub rex.w=1
3937 * @oppfx none
3938 * @opcpuid mmx
3939 * @opgroup og_mmx_datamove
3940 * @opxcpttype 5
3941 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3942 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3943 */
3944 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
3945 if (IEM_IS_MODRM_REG_MODE(bRm))
3946 {
3947 /* MMX, greg64 */
3948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3949 IEM_MC_BEGIN(0, 1);
3950 IEM_MC_LOCAL(uint64_t, u64Tmp);
3951
3952 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3953 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3954
3955 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3956 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
3957 IEM_MC_FPU_TO_MMX_MODE();
3958
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 else
3963 {
3964 /* MMX, [mem64] */
3965 IEM_MC_BEGIN(0, 2);
3966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3967 IEM_MC_LOCAL(uint64_t, u64Tmp);
3968
3969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3971 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3972 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3973
3974 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3975 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
3976 IEM_MC_FPU_TO_MMX_MODE();
3977
3978 IEM_MC_ADVANCE_RIP();
3979 IEM_MC_END();
3980 }
3981 }
3982 else
3983 {
3984 /**
3985 * @opdone
3986 * @opcode 0x6e
3987 * @opcodesub rex.w=0
3988 * @oppfx none
3989 * @opcpuid mmx
3990 * @opgroup og_mmx_datamove
3991 * @opxcpttype 5
3992 * @opfunction iemOp_movd_q_Pd_Ey
3993 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3994 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3995 */
3996 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
3997 if (IEM_IS_MODRM_REG_MODE(bRm))
3998 {
3999 /* MMX, greg */
4000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4001 IEM_MC_BEGIN(0, 1);
4002 IEM_MC_LOCAL(uint64_t, u64Tmp);
4003
4004 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4005 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4006
4007 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4008 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4009 IEM_MC_FPU_TO_MMX_MODE();
4010
4011 IEM_MC_ADVANCE_RIP();
4012 IEM_MC_END();
4013 }
4014 else
4015 {
4016 /* MMX, [mem] */
4017 IEM_MC_BEGIN(0, 2);
4018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4019 IEM_MC_LOCAL(uint32_t, u32Tmp);
4020
4021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4023 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4024 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4025
4026 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4027 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4028 IEM_MC_FPU_TO_MMX_MODE();
4029
4030 IEM_MC_ADVANCE_RIP();
4031 IEM_MC_END();
4032 }
4033 }
4034 return VINF_SUCCESS;
4035}
4036
4037FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4038{
4039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4040 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4041 {
4042 /**
4043 * @opcode 0x6e
4044 * @opcodesub rex.w=1
4045 * @oppfx 0x66
4046 * @opcpuid sse2
4047 * @opgroup og_sse2_simdint_datamove
4048 * @opxcpttype 5
4049 * @optest 64-bit / op1=1 op2=2 -> op1=2
4050 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4051 */
4052 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4053 if (IEM_IS_MODRM_REG_MODE(bRm))
4054 {
4055 /* XMM, greg64 */
4056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4057 IEM_MC_BEGIN(0, 1);
4058 IEM_MC_LOCAL(uint64_t, u64Tmp);
4059
4060 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4061 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4062
4063 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4064 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4065
4066 IEM_MC_ADVANCE_RIP();
4067 IEM_MC_END();
4068 }
4069 else
4070 {
4071 /* XMM, [mem64] */
4072 IEM_MC_BEGIN(0, 2);
4073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4074 IEM_MC_LOCAL(uint64_t, u64Tmp);
4075
4076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4078 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4079 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4080
4081 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4082 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4083
4084 IEM_MC_ADVANCE_RIP();
4085 IEM_MC_END();
4086 }
4087 }
4088 else
4089 {
4090 /**
4091 * @opdone
4092 * @opcode 0x6e
4093 * @opcodesub rex.w=0
4094 * @oppfx 0x66
4095 * @opcpuid sse2
4096 * @opgroup og_sse2_simdint_datamove
4097 * @opxcpttype 5
4098 * @opfunction iemOp_movd_q_Vy_Ey
4099 * @optest op1=1 op2=2 -> op1=2
4100 * @optest op1=0 op2=-42 -> op1=-42
4101 */
4102 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4103 if (IEM_IS_MODRM_REG_MODE(bRm))
4104 {
4105 /* XMM, greg32 */
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4107 IEM_MC_BEGIN(0, 1);
4108 IEM_MC_LOCAL(uint32_t, u32Tmp);
4109
4110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4112
4113 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4114 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4115
4116 IEM_MC_ADVANCE_RIP();
4117 IEM_MC_END();
4118 }
4119 else
4120 {
4121 /* XMM, [mem32] */
4122 IEM_MC_BEGIN(0, 2);
4123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4124 IEM_MC_LOCAL(uint32_t, u32Tmp);
4125
4126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4130
4131 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4132 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4133
4134 IEM_MC_ADVANCE_RIP();
4135 IEM_MC_END();
4136 }
4137 }
4138 return VINF_SUCCESS;
4139}
4140
4141/* Opcode 0xf3 0x0f 0x6e - invalid */
4142
4143
4144/**
4145 * @opcode 0x6f
4146 * @oppfx none
4147 * @opcpuid mmx
4148 * @opgroup og_mmx_datamove
4149 * @opxcpttype 5
4150 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4151 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4152 */
4153FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4154{
4155 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157 if (IEM_IS_MODRM_REG_MODE(bRm))
4158 {
4159 /*
4160 * Register, register.
4161 */
4162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4163 IEM_MC_BEGIN(0, 1);
4164 IEM_MC_LOCAL(uint64_t, u64Tmp);
4165
4166 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4167 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4168
4169 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4170 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4171 IEM_MC_FPU_TO_MMX_MODE();
4172
4173 IEM_MC_ADVANCE_RIP();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /*
4179 * Register, memory.
4180 */
4181 IEM_MC_BEGIN(0, 2);
4182 IEM_MC_LOCAL(uint64_t, u64Tmp);
4183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4184
4185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4187 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4188 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4189
4190 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4191 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4192 IEM_MC_FPU_TO_MMX_MODE();
4193
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 return VINF_SUCCESS;
4198}
4199
4200/**
4201 * @opcode 0x6f
4202 * @oppfx 0x66
4203 * @opcpuid sse2
4204 * @opgroup og_sse2_simdint_datamove
4205 * @opxcpttype 1
4206 * @optest op1=1 op2=2 -> op1=2
4207 * @optest op1=0 op2=-42 -> op1=-42
4208 */
4209FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4210{
4211 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4213 if (IEM_IS_MODRM_REG_MODE(bRm))
4214 {
4215 /*
4216 * Register, register.
4217 */
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4219 IEM_MC_BEGIN(0, 0);
4220
4221 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4222 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4223
4224 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4225 IEM_GET_MODRM_RM(pVCpu, bRm));
4226 IEM_MC_ADVANCE_RIP();
4227 IEM_MC_END();
4228 }
4229 else
4230 {
4231 /*
4232 * Register, memory.
4233 */
4234 IEM_MC_BEGIN(0, 2);
4235 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4237
4238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4240 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4241 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4242
4243 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4244 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4245
4246 IEM_MC_ADVANCE_RIP();
4247 IEM_MC_END();
4248 }
4249 return VINF_SUCCESS;
4250}
4251
4252/**
4253 * @opcode 0x6f
4254 * @oppfx 0xf3
4255 * @opcpuid sse2
4256 * @opgroup og_sse2_simdint_datamove
4257 * @opxcpttype 4UA
4258 * @optest op1=1 op2=2 -> op1=2
4259 * @optest op1=0 op2=-42 -> op1=-42
4260 */
4261FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4262{
4263 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
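    /* Identical to movdqa above except that the memory fetch is not
       alignment checked (IEM_MC_FETCH_MEM_U128 rather than the
       IEM_MC_FETCH_MEM_U128_ALIGN_SSE variant). */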
4264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4265 if (IEM_IS_MODRM_REG_MODE(bRm))
4266 {
4267 /*
4268 * Register, register.
4269 */
4270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4271 IEM_MC_BEGIN(0, 0);
4272 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4273 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4274 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4275 IEM_GET_MODRM_RM(pVCpu, bRm));
4276 IEM_MC_ADVANCE_RIP();
4277 IEM_MC_END();
4278 }
4279 else
4280 {
4281 /*
4282 * Register, memory.
4283 */
4284 IEM_MC_BEGIN(0, 2);
4285 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4287
4288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4290 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4292 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4293 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4294
4295 IEM_MC_ADVANCE_RIP();
4296 IEM_MC_END();
4297 }
4298 return VINF_SUCCESS;
4299}
4300
4301
4302/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
4303FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
4304{
4305 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
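    /* Each 2-bit field of the immediate selects the source word for the
       corresponding destination word; e.g. imm8=0x1B (0b00011011) selects
       source words 3,2,1,0 and thus reverses the four words. */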
4306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4307 if (IEM_IS_MODRM_REG_MODE(bRm))
4308 {
4309 /*
4310 * Register, register.
4311 */
4312 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314
4315 IEM_MC_BEGIN(3, 0);
4316 IEM_MC_ARG(uint64_t *, pDst, 0);
4317 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4318 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4319 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4320 IEM_MC_PREPARE_FPU_USAGE();
4321 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4322 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
4323 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4324 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4325 IEM_MC_FPU_TO_MMX_MODE();
4326 IEM_MC_ADVANCE_RIP();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /*
4332 * Register, memory.
4333 */
4334 IEM_MC_BEGIN(3, 2);
4335 IEM_MC_ARG(uint64_t *, pDst, 0);
4336 IEM_MC_LOCAL(uint64_t, uSrc);
4337 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4339
4340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4341 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4342 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4344 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4345
4346 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4347 IEM_MC_PREPARE_FPU_USAGE();
4348 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4349 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4350 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4351 IEM_MC_FPU_TO_MMX_MODE();
4352
4353 IEM_MC_ADVANCE_RIP();
4354 IEM_MC_END();
4355 }
4356 return VINF_SUCCESS;
4357}
4358
4359
4360/**
4361 * Common worker for SSE2 instructions on the forms:
4362 * pshufd xmm1, xmm2/mem128, imm8
4363 * pshufhw xmm1, xmm2/mem128, imm8
4364 * pshuflw xmm1, xmm2/mem128, imm8
4365 *
4366 * Proper alignment of the 128-bit operand is enforced.
4367 * Exceptions type 4. SSE2 cpuid checks.
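 * E.g. for pshufd, imm8=0xE4 (0b11100100) is the identity selection while
 * 0x1B reverses the four dwords.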
4368 */
4369FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4370{
4371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4372 if (IEM_IS_MODRM_REG_MODE(bRm))
4373 {
4374 /*
4375 * Register, register.
4376 */
4377 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4379
4380 IEM_MC_BEGIN(3, 0);
4381 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4382 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4383 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4384 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4385 IEM_MC_PREPARE_SSE_USAGE();
4386 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4387 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4388 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4389 IEM_MC_ADVANCE_RIP();
4390 IEM_MC_END();
4391 }
4392 else
4393 {
4394 /*
4395 * Register, memory.
4396 */
4397 IEM_MC_BEGIN(3, 2);
4398 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4399 IEM_MC_LOCAL(RTUINT128U, uSrc);
4400 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4402
4403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4404 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4405 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4408
4409 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4410 IEM_MC_PREPARE_SSE_USAGE();
4411 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4412 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4413
4414 IEM_MC_ADVANCE_RIP();
4415 IEM_MC_END();
4416 }
4417 return VINF_SUCCESS;
4418}
4419
4420
4421/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4422FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4423{
4424 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4425 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
4426}
4427
4428
4429/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4430FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4431{
4432 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4433 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
4434}
4435
4436
4437/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4438FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4439{
4440 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4441 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
4442}
4443
4444
4445/**
4446 * Common worker for MMX instructions of the form:
4447 * psrlw mm, imm8
4448 * psraw mm, imm8
4449 * psllw mm, imm8
4450 * psrld mm, imm8
4451 * psrad mm, imm8
4452 * pslld mm, imm8
4453 * psrlq mm, imm8
4454 * psllq mm, imm8
4455 *
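 * A shift count larger than the element width zeroes the destination for
 * the logical shifts, while psraw/psrad fill each element with its sign bit.
 *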
4456 */
4457FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
4458{
4459 if (IEM_IS_MODRM_REG_MODE(bRm))
4460 {
4461 /*
4462 * Register, immediate.
4463 */
4464 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4466
4467 IEM_MC_BEGIN(2, 0);
4468 IEM_MC_ARG(uint64_t *, pDst, 0);
4469 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4470 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4471 IEM_MC_PREPARE_FPU_USAGE();
4472 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4473 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
4474 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4475 IEM_MC_FPU_TO_MMX_MODE();
4476 IEM_MC_ADVANCE_RIP();
4477 IEM_MC_END();
4478 }
4479 else
4480 {
4481 /*
4482 * Register, memory not supported.
4483 */
4484 /// @todo Caller already enforced register mode?!
4485 }
4486 return VINF_SUCCESS;
4487}
4488
4489
4490/**
4491 * Common worker for SSE2 instructions of the form:
4492 * psrlw xmm, imm8
4493 * psraw xmm, imm8
4494 * psllw xmm, imm8
4495 * psrld xmm, imm8
4496 * psrad xmm, imm8
4497 * pslld xmm, imm8
4498 * psrlq xmm, imm8
4499 * psllq xmm, imm8
4500 *
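 * Also used for pslldq/psrldq (Group 14, /7 and /3 with the 0x66 prefix),
 * which shift by whole bytes rather than bits.
 *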
4501 */
4502FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
4503{
4504 if (IEM_IS_MODRM_REG_MODE(bRm))
4505 {
4506 /*
4507 * Register, immediate.
4508 */
4509 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4511
4512 IEM_MC_BEGIN(2, 0);
4513 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4514 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4516 IEM_MC_PREPARE_SSE_USAGE();
4517 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4518 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
4519 IEM_MC_ADVANCE_RIP();
4520 IEM_MC_END();
4521 }
4522 else
4523 {
4524 /*
4525 * Register, memory not supported.
4526 */
4527 /// @todo Caller already enforced register mode?!
4528 }
4529 return VINF_SUCCESS;
4530}
4531
4532
4533/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
4534FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
4535{
4536// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4537 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
4538}
4539
4540
4541/** Opcode 0x66 0x0f 0x71 11/2. */
4542FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
4543{
4544// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4545 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
4546}
4547
4548
4549/** Opcode 0x0f 0x71 11/4. */
4550FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
4551{
4552// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4553 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
4554}
4555
4556
4557/** Opcode 0x66 0x0f 0x71 11/4. */
4558FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
4559{
4560// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4561 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
4562}
4563
4564
4565/** Opcode 0x0f 0x71 11/6. */
4566FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
4567{
4568// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4569 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
4570}
4571
4572
4573/** Opcode 0x66 0x0f 0x71 11/6. */
4574FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
4575{
4576// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4577 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
4578}
4579
4580
4581/**
4582 * Group 12 jump table for register variant.
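 * Indexed by the ModR/M reg field times four plus the SIMD prefix index
 * (0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2), matching the dispatch
 * expression in iemOp_Grp12 below.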
4583 */
4584IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4585{
4586 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4587 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4588 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4589 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4590 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4591 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4592 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4593 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4594};
4595AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4596
4597
4598/** Opcode 0x0f 0x71. */
4599FNIEMOP_DEF(iemOp_Grp12)
4600{
4601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4602 if (IEM_IS_MODRM_REG_MODE(bRm))
4603 /* register, register */
4604 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4605 + pVCpu->iem.s.idxPrefix], bRm);
4606 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4607}
4608
4609
4610/** Opcode 0x0f 0x72 11/2. */
4611FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
4612{
4613// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4614 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
4615}
4616
4617
4618/** Opcode 0x66 0x0f 0x72 11/2. */
4619FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
4620{
4621// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4622 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
4623}
4624
4625
4626/** Opcode 0x0f 0x72 11/4. */
4627FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
4628{
4629// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4630 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
4631}
4632
4633
4634/** Opcode 0x66 0x0f 0x72 11/4. */
4635FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
4636{
4637// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4638 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
4639}
4640
4641
4642/** Opcode 0x0f 0x72 11/6. */
4643FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
4644{
4645// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4646 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
4647}
4648
4649/** Opcode 0x66 0x0f 0x72 11/6. */
4650FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
4651{
4652// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4653 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
4654}
4655
4656
4657/**
4658 * Group 13 jump table for register variant.
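 * Same four-entries-per-/r layout as the Group 12 table above.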
4659 */
4660IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4661{
4662 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4663 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4664 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4665 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4666 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4667 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4668 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4669 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4670};
4671AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4672
4673/** Opcode 0x0f 0x72. */
4674FNIEMOP_DEF(iemOp_Grp13)
4675{
4676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4677 if (IEM_IS_MODRM_REG_MODE(bRm))
4678 /* register, register */
4679 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4680 + pVCpu->iem.s.idxPrefix], bRm);
4681 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4682}
4683
4684
4685/** Opcode 0x0f 0x73 11/2. */
4686FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
4687{
4688// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4689 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
4690}
4691
4692
4693/** Opcode 0x66 0x0f 0x73 11/2. */
4694FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
4695{
4696// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4697 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
4698}
4699
4700
4701/** Opcode 0x66 0x0f 0x73 11/3. */
4702FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
4703{
4704// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4705 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
4706}
4707
4708
4709/** Opcode 0x0f 0x73 11/6. */
4710FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
4711{
4712// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4713 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
4714}
4715
4716
4717/** Opcode 0x66 0x0f 0x73 11/6. */
4718FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
4719{
4720// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4721 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
4722}
4723
4724
4725/** Opcode 0x66 0x0f 0x73 11/7. */
4726FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
4727{
4728// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4729 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
4730}
4731
4732/**
4733 * Group 14 jump table for register variant.
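 * Same four-entries-per-/r layout as the Group 12 and 13 tables above.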
4734 */
4735IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4736{
4737 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4738 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4739 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4740 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4741 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4742 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4743 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4744 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4745};
4746AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4747
4748
4749/** Opcode 0x0f 0x73. */
4750FNIEMOP_DEF(iemOp_Grp14)
4751{
4752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4753 if (IEM_IS_MODRM_REG_MODE(bRm))
4754 /* register, register */
4755 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4756 + pVCpu->iem.s.idxPrefix], bRm);
4757 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4758}
4759
4760
4761/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4762FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4763{
4764 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4765 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
4766}
4767
4768
4769/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4770FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4771{
4772 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4773 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
4774}
4775
4776
4777/* Opcode 0xf3 0x0f 0x74 - invalid */
4778/* Opcode 0xf2 0x0f 0x74 - invalid */
4779
4780
4781/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4782FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4783{
4784 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4785 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
4786}
4787
4788
4789/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4790FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4791{
4792 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4793 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
4794}
4795
4796
4797/* Opcode 0xf3 0x0f 0x75 - invalid */
4798/* Opcode 0xf2 0x0f 0x75 - invalid */
4799
4800
4801/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4802FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4803{
4804 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4805 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
4806}
4807
4808
4809/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4810FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4811{
4812 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4813 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
4814}
4815
4816
4817/* Opcode 0xf3 0x0f 0x76 - invalid */
4818/* Opcode 0xf2 0x0f 0x76 - invalid */
4819
4820
4821/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4822FNIEMOP_DEF(iemOp_emms)
4823{
4824 IEMOP_MNEMONIC(emms, "emms");
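    /* emms marks all eight x87 registers as empty, taking the FPU out of
       MMX mode; IEM_MC_FPU_FROM_MMX_MODE() below does that transition. */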
4825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4826
4827 IEM_MC_BEGIN(0, 0);
4828 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4829 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4830 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4831 IEM_MC_FPU_FROM_MMX_MODE();
4832 IEM_MC_ADVANCE_RIP();
4833 IEM_MC_END();
4834 return VINF_SUCCESS;
4835}
4836
4837/* Opcode 0x66 0x0f 0x77 - invalid */
4838/* Opcode 0xf3 0x0f 0x77 - invalid */
4839/* Opcode 0xf2 0x0f 0x77 - invalid */
4840
4841/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4842#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4843FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4844{
4845 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4846 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4847 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
4848 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4849
4850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4851 if (IEM_IS_MODRM_REG_MODE(bRm))
4852 {
4853 /*
4854 * Register, register.
4855 */
4856 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4857 if (enmEffOpSize == IEMMODE_64BIT)
4858 {
4859 IEM_MC_BEGIN(2, 0);
4860 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4861 IEM_MC_ARG(uint64_t, u64Enc, 1);
4862 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4863 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
4864 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
4865 IEM_MC_END();
4866 }
4867 else
4868 {
4869 IEM_MC_BEGIN(2, 0);
4870 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4871 IEM_MC_ARG(uint32_t, u32Enc, 1);
4872 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4873 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
4874 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
4875 IEM_MC_END();
4876 }
4877 }
4878 else
4879 {
4880 /*
4881 * Memory, register.
4882 */
4883 if (enmEffOpSize == IEMMODE_64BIT)
4884 {
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4887 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4888 IEM_MC_ARG(uint64_t, u64Enc, 2);
4889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4890 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4891 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4892 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4893 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
4894 IEM_MC_END();
4895 }
4896 else
4897 {
4898 IEM_MC_BEGIN(3, 0);
4899 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4900 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4901 IEM_MC_ARG(uint32_t, u32Enc, 2);
4902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4903 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4904 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4905 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4906 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
4907 IEM_MC_END();
4908 }
4909 }
4910 return VINF_SUCCESS;
4911}
4912#else
4913FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4914#endif
4915
4916/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4917FNIEMOP_STUB(iemOp_AmdGrp17);
4918/* Opcode 0xf3 0x0f 0x78 - invalid */
4919/* Opcode 0xf2 0x0f 0x78 - invalid */
4920
4921/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4922#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4923FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4924{
4925 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4926 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4927 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4928 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4929
4930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4931 if (IEM_IS_MODRM_REG_MODE(bRm))
4932 {
4933 /*
4934 * Register, register.
4935 */
4936 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4937 if (enmEffOpSize == IEMMODE_64BIT)
4938 {
4939 IEM_MC_BEGIN(2, 0);
4940 IEM_MC_ARG(uint64_t, u64Val, 0);
4941 IEM_MC_ARG(uint64_t, u64Enc, 1);
4942 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
4943 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4944 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4945 IEM_MC_END();
4946 }
4947 else
4948 {
4949 IEM_MC_BEGIN(2, 0);
4950 IEM_MC_ARG(uint32_t, u32Val, 0);
4951 IEM_MC_ARG(uint32_t, u32Enc, 1);
4952 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
4953 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4954 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4955 IEM_MC_END();
4956 }
4957 }
4958 else
4959 {
4960 /*
4961 * Register, memory.
4962 */
4963 if (enmEffOpSize == IEMMODE_64BIT)
4964 {
4965 IEM_MC_BEGIN(3, 0);
4966 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4967 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4968 IEM_MC_ARG(uint64_t, u64Enc, 2);
4969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4970 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4971 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4972 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4973 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
4974 IEM_MC_END();
4975 }
4976 else
4977 {
4978 IEM_MC_BEGIN(3, 0);
4979 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4980 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4981 IEM_MC_ARG(uint32_t, u32Enc, 2);
4982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4983 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4984 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4985 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4986 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
4987 IEM_MC_END();
4988 }
4989 }
4990 return VINF_SUCCESS;
4991}
4992#else
4993FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4994#endif
4995/* Opcode 0x66 0x0f 0x79 - invalid */
4996/* Opcode 0xf3 0x0f 0x79 - invalid */
4997/* Opcode 0xf2 0x0f 0x79 - invalid */
4998
4999/* Opcode 0x0f 0x7a - invalid */
5000/* Opcode 0x66 0x0f 0x7a - invalid */
5001/* Opcode 0xf3 0x0f 0x7a - invalid */
5002/* Opcode 0xf2 0x0f 0x7a - invalid */
5003
5004/* Opcode 0x0f 0x7b - invalid */
5005/* Opcode 0x66 0x0f 0x7b - invalid */
5006/* Opcode 0xf3 0x0f 0x7b - invalid */
5007/* Opcode 0xf2 0x0f 0x7b - invalid */
5008
5009/* Opcode 0x0f 0x7c - invalid */
5010/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5011FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5012/* Opcode 0xf3 0x0f 0x7c - invalid */
5013/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5014FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5015
5016/* Opcode 0x0f 0x7d - invalid */
5017/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5018FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5019/* Opcode 0xf3 0x0f 0x7d - invalid */
5020/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5021FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5022
5023
5024/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5025FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5026{
5027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5028 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5029 {
5030 /**
5031 * @opcode 0x7e
5032 * @opcodesub rex.w=1
5033 * @oppfx none
5034 * @opcpuid mmx
5035 * @opgroup og_mmx_datamove
5036 * @opxcpttype 5
5037 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5038 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5039 */
5040 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5041 if (IEM_IS_MODRM_REG_MODE(bRm))
5042 {
5043 /* greg64, MMX */
5044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5045 IEM_MC_BEGIN(0, 1);
5046 IEM_MC_LOCAL(uint64_t, u64Tmp);
5047
5048 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5049 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5050
5051 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5052 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5053 IEM_MC_FPU_TO_MMX_MODE();
5054
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 }
5058 else
5059 {
5060 /* [mem64], MMX */
5061 IEM_MC_BEGIN(0, 2);
5062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5063 IEM_MC_LOCAL(uint64_t, u64Tmp);
5064
5065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5068 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5069
5070 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5071 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5072 IEM_MC_FPU_TO_MMX_MODE();
5073
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 }
5077 }
5078 else
5079 {
5080 /**
5081 * @opdone
5082 * @opcode 0x7e
5083 * @opcodesub rex.w=0
5084 * @oppfx none
5085 * @opcpuid mmx
5086 * @opgroup og_mmx_datamove
5087 * @opxcpttype 5
5088 * @opfunction iemOp_movd_q_Ey_Pd
5089 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5090 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5091 */
5092 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5093 if (IEM_IS_MODRM_REG_MODE(bRm))
5094 {
5095 /* greg32, MMX */
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097 IEM_MC_BEGIN(0, 1);
5098 IEM_MC_LOCAL(uint32_t, u32Tmp);
5099
5100 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5101 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5102
5103 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5104 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5105 IEM_MC_FPU_TO_MMX_MODE();
5106
5107 IEM_MC_ADVANCE_RIP();
5108 IEM_MC_END();
5109 }
5110 else
5111 {
5112 /* [mem32], MMX */
5113 IEM_MC_BEGIN(0, 2);
5114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5115 IEM_MC_LOCAL(uint32_t, u32Tmp);
5116
5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5120 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5121
5122 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5123 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5124 IEM_MC_FPU_TO_MMX_MODE();
5125
5126 IEM_MC_ADVANCE_RIP();
5127 IEM_MC_END();
5128 }
5129 }
5130 return VINF_SUCCESS;
5132}
5133
5134
5135FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5136{
5137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5138 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5139 {
5140 /**
5141 * @opcode 0x7e
5142 * @opcodesub rex.w=1
5143 * @oppfx 0x66
5144 * @opcpuid sse2
5145 * @opgroup og_sse2_simdint_datamove
5146 * @opxcpttype 5
5147 * @optest 64-bit / op1=1 op2=2 -> op1=2
5148 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5149 */
5150 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5151 if (IEM_IS_MODRM_REG_MODE(bRm))
5152 {
5153 /* greg64, XMM */
5154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5155 IEM_MC_BEGIN(0, 1);
5156 IEM_MC_LOCAL(uint64_t, u64Tmp);
5157
5158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5160
5161 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5162 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5163
5164 IEM_MC_ADVANCE_RIP();
5165 IEM_MC_END();
5166 }
5167 else
5168 {
5169 /* [mem64], XMM */
5170 IEM_MC_BEGIN(0, 2);
5171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5172 IEM_MC_LOCAL(uint64_t, u64Tmp);
5173
5174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5176 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5178
5179 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5180 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5181
5182 IEM_MC_ADVANCE_RIP();
5183 IEM_MC_END();
5184 }
5185 }
5186 else
5187 {
5188 /**
5189 * @opdone
5190 * @opcode 0x7e
5191 * @opcodesub rex.w=0
5192 * @oppfx 0x66
5193 * @opcpuid sse2
5194 * @opgroup og_sse2_simdint_datamove
5195 * @opxcpttype 5
5196 * @opfunction iemOp_movd_q_Ey_Vy
5197 * @optest op1=1 op2=2 -> op1=2
5198 * @optest op1=0 op2=-42 -> op1=-42
5199 */
5200 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5201 if (IEM_IS_MODRM_REG_MODE(bRm))
5202 {
5203 /* greg32, XMM */
5204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5205 IEM_MC_BEGIN(0, 1);
5206 IEM_MC_LOCAL(uint32_t, u32Tmp);
5207
5208 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5209 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5210
5211 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5212 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5213
5214 IEM_MC_ADVANCE_RIP();
5215 IEM_MC_END();
5216 }
5217 else
5218 {
5219 /* [mem32], XMM */
5220 IEM_MC_BEGIN(0, 2);
5221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5222 IEM_MC_LOCAL(uint32_t, u32Tmp);
5223
5224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5226 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5227 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5228
5229 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5230 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5231
5232 IEM_MC_ADVANCE_RIP();
5233 IEM_MC_END();
5234 }
5235 }
5236 return VINF_SUCCESS;
5238}
5239
5240/**
5241 * @opcode 0x7e
5242 * @oppfx 0xf3
5243 * @opcpuid sse2
5244 * @opgroup og_sse2_pcksclr_datamove
5245 * @opxcpttype none
5246 * @optest op1=1 op2=2 -> op1=2
5247 * @optest op1=0 op2=-42 -> op1=-42
5248 */
5249FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5250{
5251 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
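    /* Loads the low qword of xmm2/mem64 and zero-extends it to the full
       128-bit destination register, hence the VqZx operand form. */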
5252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5253 if (IEM_IS_MODRM_REG_MODE(bRm))
5254 {
5255 /*
5256 * Register, register.
5257 */
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_BEGIN(0, 2);
5260 IEM_MC_LOCAL(uint64_t, uSrc);
5261
5262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5264
5265 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5266 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5267
5268 IEM_MC_ADVANCE_RIP();
5269 IEM_MC_END();
5270 }
5271 else
5272 {
5273 /*
5274 * Memory, register.
5275 */
5276 IEM_MC_BEGIN(0, 2);
5277 IEM_MC_LOCAL(uint64_t, uSrc);
5278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5279
5280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5282 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5284
5285 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5286 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5287
5288 IEM_MC_ADVANCE_RIP();
5289 IEM_MC_END();
5290 }
5291 return VINF_SUCCESS;
5292}
5293
5294/* Opcode 0xf2 0x0f 0x7e - invalid */
5295
5296
5297/** Opcode 0x0f 0x7f - movq Qq, Pq */
5298FNIEMOP_DEF(iemOp_movq_Qq_Pq)
5299{
5300 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
5301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5302 if (IEM_IS_MODRM_REG_MODE(bRm))
5303 {
5304 /*
5305 * Register, register.
5306 */
5307 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
5308 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
5309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5310 IEM_MC_BEGIN(0, 1);
5311 IEM_MC_LOCAL(uint64_t, u64Tmp);
5312 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5313 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5314 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5315 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
5316 IEM_MC_FPU_TO_MMX_MODE();
5317 IEM_MC_ADVANCE_RIP();
5318 IEM_MC_END();
5319 }
5320 else
5321 {
5322 /*
5323 * Memory, register.
5324 */
5325 IEM_MC_BEGIN(0, 2);
5326 IEM_MC_LOCAL(uint64_t, u64Tmp);
5327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5328
5329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5331 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5332 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5333
5334 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5335 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5336 IEM_MC_FPU_TO_MMX_MODE();
5337
5338 IEM_MC_ADVANCE_RIP();
5339 IEM_MC_END();
5340 }
5341 return VINF_SUCCESS;
5342}
5343
5344/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
5345FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
5346{
5347 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5349 if (IEM_IS_MODRM_REG_MODE(bRm))
5350 {
5351 /*
5352 * Register, register.
5353 */
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355 IEM_MC_BEGIN(0, 0);
5356 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5357 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5358 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5359 IEM_GET_MODRM_REG(pVCpu, bRm));
5360 IEM_MC_ADVANCE_RIP();
5361 IEM_MC_END();
5362 }
5363 else
5364 {
5365 /*
5366 * Register, memory.
5367 */
5368 IEM_MC_BEGIN(0, 2);
5369 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5371
5372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5376
5377 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5378 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5379
5380 IEM_MC_ADVANCE_RIP();
5381 IEM_MC_END();
5382 }
5383 return VINF_SUCCESS;
5384}
5385
5386/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
5387FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
5388{
5389 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5391 if (IEM_IS_MODRM_REG_MODE(bRm))
5392 {
5393 /*
5394 * Register, register.
5395 */
5396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5397 IEM_MC_BEGIN(0, 0);
5398 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5400 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5401 IEM_GET_MODRM_REG(pVCpu, bRm));
5402 IEM_MC_ADVANCE_RIP();
5403 IEM_MC_END();
5404 }
5405 else
5406 {
5407 /*
5408 * Register, memory.
5409 */
5410 IEM_MC_BEGIN(0, 2);
5411 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5413
5414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5417 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5418
5419 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5420 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5421
5422 IEM_MC_ADVANCE_RIP();
5423 IEM_MC_END();
5424 }
5425 return VINF_SUCCESS;
5426}
5427
5428/* Opcode 0xf2 0x0f 0x7f - invalid */
5429
5430
5431
5432/** Opcode 0x0f 0x80. */
5433FNIEMOP_DEF(iemOp_jo_Jv)
5434{
5435 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
5436 IEMOP_HLP_MIN_386();
5437 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
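    /* In 64-bit mode the effective operand size is forced to 64-bit above,
       so the else branch below covers it: the displacement stays 32 bits
       and is sign-extended when added to RIP. All 0x0f 0x8x Jcc forms
       follow this pattern. */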
5438 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5439 {
5440 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5442
5443 IEM_MC_BEGIN(0, 0);
5444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5445 IEM_MC_REL_JMP_S16(i16Imm);
5446 } IEM_MC_ELSE() {
5447 IEM_MC_ADVANCE_RIP();
5448 } IEM_MC_ENDIF();
5449 IEM_MC_END();
5450 }
5451 else
5452 {
5453 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455
5456 IEM_MC_BEGIN(0, 0);
5457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5458 IEM_MC_REL_JMP_S32(i32Imm);
5459 } IEM_MC_ELSE() {
5460 IEM_MC_ADVANCE_RIP();
5461 } IEM_MC_ENDIF();
5462 IEM_MC_END();
5463 }
5464 return VINF_SUCCESS;
5465}
5466
5467
5468/** Opcode 0x0f 0x81. */
5469FNIEMOP_DEF(iemOp_jno_Jv)
5470{
5471 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5472 IEMOP_HLP_MIN_386();
5473 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5474 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5475 {
5476 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478
5479 IEM_MC_BEGIN(0, 0);
5480 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5481 IEM_MC_ADVANCE_RIP();
5482 } IEM_MC_ELSE() {
5483 IEM_MC_REL_JMP_S16(i16Imm);
5484 } IEM_MC_ENDIF();
5485 IEM_MC_END();
5486 }
5487 else
5488 {
5489 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5491
5492 IEM_MC_BEGIN(0, 0);
5493 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5494 IEM_MC_ADVANCE_RIP();
5495 } IEM_MC_ELSE() {
5496 IEM_MC_REL_JMP_S32(i32Imm);
5497 } IEM_MC_ENDIF();
5498 IEM_MC_END();
5499 }
5500 return VINF_SUCCESS;
5501}
5502
5503
5504/** Opcode 0x0f 0x82. */
5505FNIEMOP_DEF(iemOp_jc_Jv)
5506{
5507 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5508 IEMOP_HLP_MIN_386();
5509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5510 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5511 {
5512 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514
5515 IEM_MC_BEGIN(0, 0);
5516 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5517 IEM_MC_REL_JMP_S16(i16Imm);
5518 } IEM_MC_ELSE() {
5519 IEM_MC_ADVANCE_RIP();
5520 } IEM_MC_ENDIF();
5521 IEM_MC_END();
5522 }
5523 else
5524 {
5525 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5527
5528 IEM_MC_BEGIN(0, 0);
5529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5530 IEM_MC_REL_JMP_S32(i32Imm);
5531 } IEM_MC_ELSE() {
5532 IEM_MC_ADVANCE_RIP();
5533 } IEM_MC_ENDIF();
5534 IEM_MC_END();
5535 }
5536 return VINF_SUCCESS;
5537}
5538
5539
5540/** Opcode 0x0f 0x83. */
5541FNIEMOP_DEF(iemOp_jnc_Jv)
5542{
5543 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5544 IEMOP_HLP_MIN_386();
5545 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5546 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5547 {
5548 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550
5551 IEM_MC_BEGIN(0, 0);
5552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5553 IEM_MC_ADVANCE_RIP();
5554 } IEM_MC_ELSE() {
5555 IEM_MC_REL_JMP_S16(i16Imm);
5556 } IEM_MC_ENDIF();
5557 IEM_MC_END();
5558 }
5559 else
5560 {
5561 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5563
5564 IEM_MC_BEGIN(0, 0);
5565 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5566 IEM_MC_ADVANCE_RIP();
5567 } IEM_MC_ELSE() {
5568 IEM_MC_REL_JMP_S32(i32Imm);
5569 } IEM_MC_ENDIF();
5570 IEM_MC_END();
5571 }
5572 return VINF_SUCCESS;
5573}
5574
5575
5576/** Opcode 0x0f 0x84. */
5577FNIEMOP_DEF(iemOp_je_Jv)
5578{
5579 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5580 IEMOP_HLP_MIN_386();
5581 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5582 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5583 {
5584 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5586
5587 IEM_MC_BEGIN(0, 0);
5588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5589 IEM_MC_REL_JMP_S16(i16Imm);
5590 } IEM_MC_ELSE() {
5591 IEM_MC_ADVANCE_RIP();
5592 } IEM_MC_ENDIF();
5593 IEM_MC_END();
5594 }
5595 else
5596 {
5597 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5599
5600 IEM_MC_BEGIN(0, 0);
5601 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5602 IEM_MC_REL_JMP_S32(i32Imm);
5603 } IEM_MC_ELSE() {
5604 IEM_MC_ADVANCE_RIP();
5605 } IEM_MC_ENDIF();
5606 IEM_MC_END();
5607 }
5608 return VINF_SUCCESS;
5609}
5610
5611
5612/** Opcode 0x0f 0x85. */
5613FNIEMOP_DEF(iemOp_jne_Jv)
5614{
5615 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5616 IEMOP_HLP_MIN_386();
5617 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5618 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5619 {
5620 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5622
5623 IEM_MC_BEGIN(0, 0);
5624 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5625 IEM_MC_ADVANCE_RIP();
5626 } IEM_MC_ELSE() {
5627 IEM_MC_REL_JMP_S16(i16Imm);
5628 } IEM_MC_ENDIF();
5629 IEM_MC_END();
5630 }
5631 else
5632 {
5633 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5635
5636 IEM_MC_BEGIN(0, 0);
5637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5638 IEM_MC_ADVANCE_RIP();
5639 } IEM_MC_ELSE() {
5640 IEM_MC_REL_JMP_S32(i32Imm);
5641 } IEM_MC_ENDIF();
5642 IEM_MC_END();
5643 }
5644 return VINF_SUCCESS;
5645}
5646
5647
5648/** Opcode 0x0f 0x86. */
5649FNIEMOP_DEF(iemOp_jbe_Jv)
5650{
5651 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5652 IEMOP_HLP_MIN_386();
5653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5654 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5655 {
5656 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658
5659 IEM_MC_BEGIN(0, 0);
5660 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5661 IEM_MC_REL_JMP_S16(i16Imm);
5662 } IEM_MC_ELSE() {
5663 IEM_MC_ADVANCE_RIP();
5664 } IEM_MC_ENDIF();
5665 IEM_MC_END();
5666 }
5667 else
5668 {
5669 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5671
5672 IEM_MC_BEGIN(0, 0);
5673 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5674 IEM_MC_REL_JMP_S32(i32Imm);
5675 } IEM_MC_ELSE() {
5676 IEM_MC_ADVANCE_RIP();
5677 } IEM_MC_ENDIF();
5678 IEM_MC_END();
5679 }
5680 return VINF_SUCCESS;
5681}
5682
5683
5684/** Opcode 0x0f 0x87. */
5685FNIEMOP_DEF(iemOp_jnbe_Jv)
5686{
5687 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5688 IEMOP_HLP_MIN_386();
5689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5690 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5691 {
5692 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694
5695 IEM_MC_BEGIN(0, 0);
5696 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5697 IEM_MC_ADVANCE_RIP();
5698 } IEM_MC_ELSE() {
5699 IEM_MC_REL_JMP_S16(i16Imm);
5700 } IEM_MC_ENDIF();
5701 IEM_MC_END();
5702 }
5703 else
5704 {
5705 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707
5708 IEM_MC_BEGIN(0, 0);
5709 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5710 IEM_MC_ADVANCE_RIP();
5711 } IEM_MC_ELSE() {
5712 IEM_MC_REL_JMP_S32(i32Imm);
5713 } IEM_MC_ENDIF();
5714 IEM_MC_END();
5715 }
5716 return VINF_SUCCESS;
5717}
5718
5719
5720/** Opcode 0x0f 0x88. */
5721FNIEMOP_DEF(iemOp_js_Jv)
5722{
5723 IEMOP_MNEMONIC(js_Jv, "js Jv");
5724 IEMOP_HLP_MIN_386();
5725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5726 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5727 {
5728 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5730
5731 IEM_MC_BEGIN(0, 0);
5732 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5733 IEM_MC_REL_JMP_S16(i16Imm);
5734 } IEM_MC_ELSE() {
5735 IEM_MC_ADVANCE_RIP();
5736 } IEM_MC_ENDIF();
5737 IEM_MC_END();
5738 }
5739 else
5740 {
5741 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5743
5744 IEM_MC_BEGIN(0, 0);
5745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5746 IEM_MC_REL_JMP_S32(i32Imm);
5747 } IEM_MC_ELSE() {
5748 IEM_MC_ADVANCE_RIP();
5749 } IEM_MC_ENDIF();
5750 IEM_MC_END();
5751 }
5752 return VINF_SUCCESS;
5753}
5754
5755
5756/** Opcode 0x0f 0x89. */
5757FNIEMOP_DEF(iemOp_jns_Jv)
5758{
5759 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5760 IEMOP_HLP_MIN_386();
5761 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5762 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5763 {
5764 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766
5767 IEM_MC_BEGIN(0, 0);
5768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5769 IEM_MC_ADVANCE_RIP();
5770 } IEM_MC_ELSE() {
5771 IEM_MC_REL_JMP_S16(i16Imm);
5772 } IEM_MC_ENDIF();
5773 IEM_MC_END();
5774 }
5775 else
5776 {
5777 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5779
5780 IEM_MC_BEGIN(0, 0);
5781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5782 IEM_MC_ADVANCE_RIP();
5783 } IEM_MC_ELSE() {
5784 IEM_MC_REL_JMP_S32(i32Imm);
5785 } IEM_MC_ENDIF();
5786 IEM_MC_END();
5787 }
5788 return VINF_SUCCESS;
5789}
5790
5791
5792/** Opcode 0x0f 0x8a. */
5793FNIEMOP_DEF(iemOp_jp_Jv)
5794{
5795 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5796 IEMOP_HLP_MIN_386();
5797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5798 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5799 {
5800 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5802
5803 IEM_MC_BEGIN(0, 0);
5804 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5805 IEM_MC_REL_JMP_S16(i16Imm);
5806 } IEM_MC_ELSE() {
5807 IEM_MC_ADVANCE_RIP();
5808 } IEM_MC_ENDIF();
5809 IEM_MC_END();
5810 }
5811 else
5812 {
5813 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5815
5816 IEM_MC_BEGIN(0, 0);
5817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5818 IEM_MC_REL_JMP_S32(i32Imm);
5819 } IEM_MC_ELSE() {
5820 IEM_MC_ADVANCE_RIP();
5821 } IEM_MC_ENDIF();
5822 IEM_MC_END();
5823 }
5824 return VINF_SUCCESS;
5825}
5826
5827
5828/** Opcode 0x0f 0x8b. */
5829FNIEMOP_DEF(iemOp_jnp_Jv)
5830{
5831 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5832 IEMOP_HLP_MIN_386();
5833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5834 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5835 {
5836 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5838
5839 IEM_MC_BEGIN(0, 0);
5840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5841 IEM_MC_ADVANCE_RIP();
5842 } IEM_MC_ELSE() {
5843 IEM_MC_REL_JMP_S16(i16Imm);
5844 } IEM_MC_ENDIF();
5845 IEM_MC_END();
5846 }
5847 else
5848 {
5849 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5851
5852 IEM_MC_BEGIN(0, 0);
5853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5854 IEM_MC_ADVANCE_RIP();
5855 } IEM_MC_ELSE() {
5856 IEM_MC_REL_JMP_S32(i32Imm);
5857 } IEM_MC_ENDIF();
5858 IEM_MC_END();
5859 }
5860 return VINF_SUCCESS;
5861}
5862
5863
5864/** Opcode 0x0f 0x8c. */
5865FNIEMOP_DEF(iemOp_jl_Jv)
5866{
5867 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5868 IEMOP_HLP_MIN_386();
5869 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5870 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5871 {
5872 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5874
5875 IEM_MC_BEGIN(0, 0);
5876 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5877 IEM_MC_REL_JMP_S16(i16Imm);
5878 } IEM_MC_ELSE() {
5879 IEM_MC_ADVANCE_RIP();
5880 } IEM_MC_ENDIF();
5881 IEM_MC_END();
5882 }
5883 else
5884 {
5885 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887
5888 IEM_MC_BEGIN(0, 0);
5889 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5890 IEM_MC_REL_JMP_S32(i32Imm);
5891 } IEM_MC_ELSE() {
5892 IEM_MC_ADVANCE_RIP();
5893 } IEM_MC_ENDIF();
5894 IEM_MC_END();
5895 }
5896 return VINF_SUCCESS;
5897}
5898
5899
5900/** Opcode 0x0f 0x8d. */
5901FNIEMOP_DEF(iemOp_jnl_Jv)
5902{
5903 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5904 IEMOP_HLP_MIN_386();
5905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5906 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5907 {
5908 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5910
5911 IEM_MC_BEGIN(0, 0);
5912 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5913 IEM_MC_ADVANCE_RIP();
5914 } IEM_MC_ELSE() {
5915 IEM_MC_REL_JMP_S16(i16Imm);
5916 } IEM_MC_ENDIF();
5917 IEM_MC_END();
5918 }
5919 else
5920 {
5921 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5923
5924 IEM_MC_BEGIN(0, 0);
5925 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5926 IEM_MC_ADVANCE_RIP();
5927 } IEM_MC_ELSE() {
5928 IEM_MC_REL_JMP_S32(i32Imm);
5929 } IEM_MC_ENDIF();
5930 IEM_MC_END();
5931 }
5932 return VINF_SUCCESS;
5933}
5934
5935
5936/** Opcode 0x0f 0x8e. */
5937FNIEMOP_DEF(iemOp_jle_Jv)
5938{
5939 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5940 IEMOP_HLP_MIN_386();
5941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5943 {
5944 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5946
5947 IEM_MC_BEGIN(0, 0);
5948 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5949 IEM_MC_REL_JMP_S16(i16Imm);
5950 } IEM_MC_ELSE() {
5951 IEM_MC_ADVANCE_RIP();
5952 } IEM_MC_ENDIF();
5953 IEM_MC_END();
5954 }
5955 else
5956 {
5957 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5959
5960 IEM_MC_BEGIN(0, 0);
5961 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5962 IEM_MC_REL_JMP_S32(i32Imm);
5963 } IEM_MC_ELSE() {
5964 IEM_MC_ADVANCE_RIP();
5965 } IEM_MC_ENDIF();
5966 IEM_MC_END();
5967 }
5968 return VINF_SUCCESS;
5969}
5970
5971
5972/** Opcode 0x0f 0x8f. */
5973FNIEMOP_DEF(iemOp_jnle_Jv)
5974{
5975 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5976 IEMOP_HLP_MIN_386();
5977 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5978 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5979 {
5980 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982
5983 IEM_MC_BEGIN(0, 0);
5984 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5985 IEM_MC_ADVANCE_RIP();
5986 } IEM_MC_ELSE() {
5987 IEM_MC_REL_JMP_S16(i16Imm);
5988 } IEM_MC_ENDIF();
5989 IEM_MC_END();
5990 }
5991 else
5992 {
5993 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5995
5996 IEM_MC_BEGIN(0, 0);
5997 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5998 IEM_MC_ADVANCE_RIP();
5999 } IEM_MC_ELSE() {
6000 IEM_MC_REL_JMP_S32(i32Imm);
6001 } IEM_MC_ENDIF();
6002 IEM_MC_END();
6003 }
6004 return VINF_SUCCESS;
6005}
6006
6007
6008/** Opcode 0x0f 0x90. */
6009FNIEMOP_DEF(iemOp_seto_Eb)
6010{
6011 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6012 IEMOP_HLP_MIN_386();
6013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6014
6015 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6016 * any way. AMD says it's "unused", whatever that means. We're
6017 * ignoring for now. */
6018 if (IEM_IS_MODRM_REG_MODE(bRm))
6019 {
6020 /* register target */
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_BEGIN(0, 0);
6023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6024 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6025 } IEM_MC_ELSE() {
6026 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6027 } IEM_MC_ENDIF();
6028 IEM_MC_ADVANCE_RIP();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* memory target */
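 /* SETcc unconditionally stores a full byte (0 or 1), so a plain store
    suffices here - no read-modify-write mapping and no LOCK variant. */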
6034 IEM_MC_BEGIN(0, 1);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6039 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6040 } IEM_MC_ELSE() {
6041 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6042 } IEM_MC_ENDIF();
6043 IEM_MC_ADVANCE_RIP();
6044 IEM_MC_END();
6045 }
6046 return VINF_SUCCESS;
6047}
6048
6049
6050/** Opcode 0x0f 0x91. */
6051FNIEMOP_DEF(iemOp_setno_Eb)
6052{
6053 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6054 IEMOP_HLP_MIN_386();
6055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6056
6057 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6058 * any way. AMD says it's "unused", whatever that means. We're
6059 * ignoring for now. */
6060 if (IEM_IS_MODRM_REG_MODE(bRm))
6061 {
6062 /* register target */
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6064 IEM_MC_BEGIN(0, 0);
6065 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6066 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6067 } IEM_MC_ELSE() {
6068 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6069 } IEM_MC_ENDIF();
6070 IEM_MC_ADVANCE_RIP();
6071 IEM_MC_END();
6072 }
6073 else
6074 {
6075 /* memory target */
6076 IEM_MC_BEGIN(0, 1);
6077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6081 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6082 } IEM_MC_ELSE() {
6083 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6084 } IEM_MC_ENDIF();
6085 IEM_MC_ADVANCE_RIP();
6086 IEM_MC_END();
6087 }
6088 return VINF_SUCCESS;
6089}
6090
6091
6092/** Opcode 0x0f 0x92. */
6093FNIEMOP_DEF(iemOp_setc_Eb)
6094{
6095 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6096 IEMOP_HLP_MIN_386();
6097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6098
6099 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6100 * any way. AMD says it's "unused", whatever that means. We're
6101 * ignoring for now. */
6102 if (IEM_IS_MODRM_REG_MODE(bRm))
6103 {
6104 /* register target */
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 IEM_MC_BEGIN(0, 0);
6107 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6108 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6109 } IEM_MC_ELSE() {
6110 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6111 } IEM_MC_ENDIF();
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 }
6115 else
6116 {
6117 /* memory target */
6118 IEM_MC_BEGIN(0, 1);
6119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6124 } IEM_MC_ELSE() {
6125 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6126 } IEM_MC_ENDIF();
6127 IEM_MC_ADVANCE_RIP();
6128 IEM_MC_END();
6129 }
6130 return VINF_SUCCESS;
6131}
6132
6133
6134/** Opcode 0x0f 0x93. */
6135FNIEMOP_DEF(iemOp_setnc_Eb)
6136{
6137 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6138 IEMOP_HLP_MIN_386();
6139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6140
6141 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6142 * any way. AMD says it's "unused", whatever that means. We're
6143 * ignoring for now. */
6144 if (IEM_IS_MODRM_REG_MODE(bRm))
6145 {
6146 /* register target */
6147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6148 IEM_MC_BEGIN(0, 0);
6149 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6150 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6151 } IEM_MC_ELSE() {
6152 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6153 } IEM_MC_ENDIF();
6154 IEM_MC_ADVANCE_RIP();
6155 IEM_MC_END();
6156 }
6157 else
6158 {
6159 /* memory target */
6160 IEM_MC_BEGIN(0, 1);
6161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6166 } IEM_MC_ELSE() {
6167 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6168 } IEM_MC_ENDIF();
6169 IEM_MC_ADVANCE_RIP();
6170 IEM_MC_END();
6171 }
6172 return VINF_SUCCESS;
6173}
6174
6175
6176/** Opcode 0x0f 0x94. */
6177FNIEMOP_DEF(iemOp_sete_Eb)
6178{
6179 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6180 IEMOP_HLP_MIN_386();
6181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6182
6183 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6184 * any way. AMD says it's "unused", whatever that means. We're
6185 * ignoring for now. */
6186 if (IEM_IS_MODRM_REG_MODE(bRm))
6187 {
6188 /* register target */
6189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6190 IEM_MC_BEGIN(0, 0);
6191 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6192 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6193 } IEM_MC_ELSE() {
6194 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6195 } IEM_MC_ENDIF();
6196 IEM_MC_ADVANCE_RIP();
6197 IEM_MC_END();
6198 }
6199 else
6200 {
6201 /* memory target */
6202 IEM_MC_BEGIN(0, 1);
6203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6208 } IEM_MC_ELSE() {
6209 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6210 } IEM_MC_ENDIF();
6211 IEM_MC_ADVANCE_RIP();
6212 IEM_MC_END();
6213 }
6214 return VINF_SUCCESS;
6215}
6216
6217
6218/** Opcode 0x0f 0x95. */
6219FNIEMOP_DEF(iemOp_setne_Eb)
6220{
6221 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6222 IEMOP_HLP_MIN_386();
6223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6224
6225 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6226 * any way. AMD says it's "unused", whatever that means. We're
6227 * ignoring for now. */
6228 if (IEM_IS_MODRM_REG_MODE(bRm))
6229 {
6230 /* register target */
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 IEM_MC_BEGIN(0, 0);
6233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6234 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6235 } IEM_MC_ELSE() {
6236 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6237 } IEM_MC_ENDIF();
6238 IEM_MC_ADVANCE_RIP();
6239 IEM_MC_END();
6240 }
6241 else
6242 {
6243 /* memory target */
6244 IEM_MC_BEGIN(0, 1);
6245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6248 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6250 } IEM_MC_ELSE() {
6251 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6252 } IEM_MC_ENDIF();
6253 IEM_MC_ADVANCE_RIP();
6254 IEM_MC_END();
6255 }
6256 return VINF_SUCCESS;
6257}
6258
6259
6260/** Opcode 0x0f 0x96. */
6261FNIEMOP_DEF(iemOp_setbe_Eb)
6262{
6263 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6264 IEMOP_HLP_MIN_386();
6265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6266
6267 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6268 * any way. AMD says it's "unused", whatever that means. We're
6269 * ignoring for now. */
6270 if (IEM_IS_MODRM_REG_MODE(bRm))
6271 {
6272 /* register target */
6273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6274 IEM_MC_BEGIN(0, 0);
6275 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6276 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6277 } IEM_MC_ELSE() {
6278 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6279 } IEM_MC_ENDIF();
6280 IEM_MC_ADVANCE_RIP();
6281 IEM_MC_END();
6282 }
6283 else
6284 {
6285 /* memory target */
6286 IEM_MC_BEGIN(0, 1);
6287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6290 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6292 } IEM_MC_ELSE() {
6293 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6294 } IEM_MC_ENDIF();
6295 IEM_MC_ADVANCE_RIP();
6296 IEM_MC_END();
6297 }
6298 return VINF_SUCCESS;
6299}
6300
6301
6302/** Opcode 0x0f 0x97. */
6303FNIEMOP_DEF(iemOp_setnbe_Eb)
6304{
6305 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6306 IEMOP_HLP_MIN_386();
6307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6308
6309 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6310 * any way. AMD says it's "unused", whatever that means. We're
6311 * ignoring for now. */
6312 if (IEM_IS_MODRM_REG_MODE(bRm))
6313 {
6314 /* register target */
6315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6316 IEM_MC_BEGIN(0, 0);
6317 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6318 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6319 } IEM_MC_ELSE() {
6320 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6321 } IEM_MC_ENDIF();
6322 IEM_MC_ADVANCE_RIP();
6323 IEM_MC_END();
6324 }
6325 else
6326 {
6327 /* memory target */
6328 IEM_MC_BEGIN(0, 1);
6329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6332 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6334 } IEM_MC_ELSE() {
6335 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6336 } IEM_MC_ENDIF();
6337 IEM_MC_ADVANCE_RIP();
6338 IEM_MC_END();
6339 }
6340 return VINF_SUCCESS;
6341}
6342
6343
6344/** Opcode 0x0f 0x98. */
6345FNIEMOP_DEF(iemOp_sets_Eb)
6346{
6347 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6348 IEMOP_HLP_MIN_386();
6349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6350
6351 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6352 * any way. AMD says it's "unused", whatever that means. We're
6353 * ignoring for now. */
6354 if (IEM_IS_MODRM_REG_MODE(bRm))
6355 {
6356 /* register target */
6357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6358 IEM_MC_BEGIN(0, 0);
6359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6360 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6361 } IEM_MC_ELSE() {
6362 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6363 } IEM_MC_ENDIF();
6364 IEM_MC_ADVANCE_RIP();
6365 IEM_MC_END();
6366 }
6367 else
6368 {
6369 /* memory target */
6370 IEM_MC_BEGIN(0, 1);
6371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6376 } IEM_MC_ELSE() {
6377 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6378 } IEM_MC_ENDIF();
6379 IEM_MC_ADVANCE_RIP();
6380 IEM_MC_END();
6381 }
6382 return VINF_SUCCESS;
6383}
6384
6385
6386/** Opcode 0x0f 0x99. */
6387FNIEMOP_DEF(iemOp_setns_Eb)
6388{
6389 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6390 IEMOP_HLP_MIN_386();
6391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6392
6393 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6394 * any way. AMD says it's "unused", whatever that means. We're
6395 * ignoring for now. */
6396 if (IEM_IS_MODRM_REG_MODE(bRm))
6397 {
6398 /* register target */
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_BEGIN(0, 0);
6401 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6402 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6403 } IEM_MC_ELSE() {
6404 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6405 } IEM_MC_ENDIF();
6406 IEM_MC_ADVANCE_RIP();
6407 IEM_MC_END();
6408 }
6409 else
6410 {
6411 /* memory target */
6412 IEM_MC_BEGIN(0, 1);
6413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6416 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6418 } IEM_MC_ELSE() {
6419 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6420 } IEM_MC_ENDIF();
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 }
6424 return VINF_SUCCESS;
6425}
6426
6427
6428/** Opcode 0x0f 0x9a. */
6429FNIEMOP_DEF(iemOp_setp_Eb)
6430{
6431 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6432 IEMOP_HLP_MIN_386();
6433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6434
6435 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6436 * any way. AMD says it's "unused", whatever that means. We're
6437 * ignoring for now. */
6438 if (IEM_IS_MODRM_REG_MODE(bRm))
6439 {
6440 /* register target */
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 IEM_MC_BEGIN(0, 0);
6443 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6444 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6445 } IEM_MC_ELSE() {
6446 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6447 } IEM_MC_ENDIF();
6448 IEM_MC_ADVANCE_RIP();
6449 IEM_MC_END();
6450 }
6451 else
6452 {
6453 /* memory target */
6454 IEM_MC_BEGIN(0, 1);
6455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6460 } IEM_MC_ELSE() {
6461 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6462 } IEM_MC_ENDIF();
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 }
6466 return VINF_SUCCESS;
6467}
6468
6469
6470/** Opcode 0x0f 0x9b. */
6471FNIEMOP_DEF(iemOp_setnp_Eb)
6472{
6473 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6474 IEMOP_HLP_MIN_386();
6475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6476
6477 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6478 * any way. AMD says it's "unused", whatever that means. We're
6479 * ignoring for now. */
6480 if (IEM_IS_MODRM_REG_MODE(bRm))
6481 {
6482 /* register target */
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 IEM_MC_BEGIN(0, 0);
6485 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6486 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6487 } IEM_MC_ELSE() {
6488 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6489 } IEM_MC_ENDIF();
6490 IEM_MC_ADVANCE_RIP();
6491 IEM_MC_END();
6492 }
6493 else
6494 {
6495 /* memory target */
6496 IEM_MC_BEGIN(0, 1);
6497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6502 } IEM_MC_ELSE() {
6503 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6504 } IEM_MC_ENDIF();
6505 IEM_MC_ADVANCE_RIP();
6506 IEM_MC_END();
6507 }
6508 return VINF_SUCCESS;
6509}
6510
6511
6512/** Opcode 0x0f 0x9c. */
6513FNIEMOP_DEF(iemOp_setl_Eb)
6514{
6515 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6516 IEMOP_HLP_MIN_386();
6517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6518
6519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6520 * any way. AMD says it's "unused", whatever that means. We're
6521 * ignoring for now. */
6522 if (IEM_IS_MODRM_REG_MODE(bRm))
6523 {
6524 /* register target */
6525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6526 IEM_MC_BEGIN(0, 0);
6527 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6529 } IEM_MC_ELSE() {
6530 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6531 } IEM_MC_ENDIF();
6532 IEM_MC_ADVANCE_RIP();
6533 IEM_MC_END();
6534 }
6535 else
6536 {
6537 /* memory target */
6538 IEM_MC_BEGIN(0, 1);
6539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6542 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6544 } IEM_MC_ELSE() {
6545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6546 } IEM_MC_ENDIF();
6547 IEM_MC_ADVANCE_RIP();
6548 IEM_MC_END();
6549 }
6550 return VINF_SUCCESS;
6551}
6552
6553
6554/** Opcode 0x0f 0x9d. */
6555FNIEMOP_DEF(iemOp_setnl_Eb)
6556{
6557 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6558 IEMOP_HLP_MIN_386();
6559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6560
6561 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6562 * any way. AMD says it's "unused", whatever that means. We're
6563 * ignoring for now. */
6564 if (IEM_IS_MODRM_REG_MODE(bRm))
6565 {
6566 /* register target */
6567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6568 IEM_MC_BEGIN(0, 0);
6569 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6570 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6571 } IEM_MC_ELSE() {
6572 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6573 } IEM_MC_ENDIF();
6574 IEM_MC_ADVANCE_RIP();
6575 IEM_MC_END();
6576 }
6577 else
6578 {
6579 /* memory target */
6580 IEM_MC_BEGIN(0, 1);
6581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6584 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6586 } IEM_MC_ELSE() {
6587 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6588 } IEM_MC_ENDIF();
6589 IEM_MC_ADVANCE_RIP();
6590 IEM_MC_END();
6591 }
6592 return VINF_SUCCESS;
6593}
6594
6595
6596/** Opcode 0x0f 0x9e. */
6597FNIEMOP_DEF(iemOp_setle_Eb)
6598{
6599 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6600 IEMOP_HLP_MIN_386();
6601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6602
6603 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6604 * any way. AMD says it's "unused", whatever that means. We're
6605 * ignoring for now. */
6606 if (IEM_IS_MODRM_REG_MODE(bRm))
6607 {
6608 /* register target */
6609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6610 IEM_MC_BEGIN(0, 0);
6611 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6612 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6613 } IEM_MC_ELSE() {
6614 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6615 } IEM_MC_ENDIF();
6616 IEM_MC_ADVANCE_RIP();
6617 IEM_MC_END();
6618 }
6619 else
6620 {
6621 /* memory target */
6622 IEM_MC_BEGIN(0, 1);
6623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6626 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6628 } IEM_MC_ELSE() {
6629 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6630 } IEM_MC_ENDIF();
6631 IEM_MC_ADVANCE_RIP();
6632 IEM_MC_END();
6633 }
6634 return VINF_SUCCESS;
6635}
6636
6637
6638/** Opcode 0x0f 0x9f. */
6639FNIEMOP_DEF(iemOp_setnle_Eb)
6640{
6641 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6642 IEMOP_HLP_MIN_386();
6643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6644
6645 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6646 * any way. AMD says it's "unused", whatever that means. We're
6647 * ignoring for now. */
6648 if (IEM_IS_MODRM_REG_MODE(bRm))
6649 {
6650 /* register target */
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 IEM_MC_BEGIN(0, 0);
6653 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6654 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6655 } IEM_MC_ELSE() {
6656 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6657 } IEM_MC_ENDIF();
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 }
6661 else
6662 {
6663 /* memory target */
6664 IEM_MC_BEGIN(0, 1);
6665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6668 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6670 } IEM_MC_ELSE() {
6671 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6672 } IEM_MC_ENDIF();
6673 IEM_MC_ADVANCE_RIP();
6674 IEM_MC_END();
6675 }
6676 return VINF_SUCCESS;
6677}
6678
6679
6680/**
6681 * Common 'push segment-register' helper.
6682 */
6683FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6684{
6685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6686 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only the FS/GS encodings reach this in 64-bit mode. */
6687 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6688
6689 switch (pVCpu->iem.s.enmEffOpSize)
6690 {
6691 case IEMMODE_16BIT:
6692 IEM_MC_BEGIN(0, 1);
6693 IEM_MC_LOCAL(uint16_t, u16Value);
6694 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6695 IEM_MC_PUSH_U16(u16Value);
6696 IEM_MC_ADVANCE_RIP();
6697 IEM_MC_END();
6698 break;
6699
6700 case IEMMODE_32BIT:
6701 IEM_MC_BEGIN(0, 1);
6702 IEM_MC_LOCAL(uint32_t, u32Value);
6703 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6704 IEM_MC_PUSH_U32_SREG(u32Value);
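 /* Note: IEM_MC_PUSH_U32_SREG instead of a plain IEM_MC_PUSH_U32 - for a
    32-bit push of a segment register, recent CPUs are documented to do a
    16-bit write that leaves the upper half of the stack slot untouched,
    and the dedicated microcode statement lets the emulation mirror that. */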
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 break;
6708
6709 case IEMMODE_64BIT:
6710 IEM_MC_BEGIN(0, 1);
6711 IEM_MC_LOCAL(uint64_t, u64Value);
6712 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6713 IEM_MC_PUSH_U64(u64Value);
6714 IEM_MC_ADVANCE_RIP();
6715 IEM_MC_END();
6716 break;
6717 }
6718
6719 return VINF_SUCCESS;
6720}
6721
6722
6723/** Opcode 0x0f 0xa0. */
6724FNIEMOP_DEF(iemOp_push_fs)
6725{
6726 IEMOP_MNEMONIC(push_fs, "push fs");
6727 IEMOP_HLP_MIN_386();
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6730}
6731
6732
6733/** Opcode 0x0f 0xa1. */
6734FNIEMOP_DEF(iemOp_pop_fs)
6735{
6736 IEMOP_MNEMONIC(pop_fs, "pop fs");
6737 IEMOP_HLP_MIN_386();
6738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
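 /* Loading a segment register involves descriptor-table reads and a raft
    of potential faults, so the work is deferred to the C implementation. */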
6739 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6740}
6741
6742
6743/** Opcode 0x0f 0xa2. */
6744FNIEMOP_DEF(iemOp_cpuid)
6745{
6746 IEMOP_MNEMONIC(cpuid, "cpuid");
6747 IEMOP_HLP_MIN_486(); /* not all 486es. */
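 /* Early 486 steppings lack CPUID; software probes for it by checking
    whether the EFLAGS.ID bit can be toggled. */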
6748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6749 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6750}
6751
6752
6753/**
6754 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6755 * iemOp_bts_Ev_Gv.
6756 */
6757FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6758{
6759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6760 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6761
6762 if (IEM_IS_MODRM_REG_MODE(bRm))
6763 {
6764 /* register destination. */
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 switch (pVCpu->iem.s.enmEffOpSize)
6767 {
6768 case IEMMODE_16BIT:
6769 IEM_MC_BEGIN(3, 0);
6770 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6771 IEM_MC_ARG(uint16_t, u16Src, 1);
6772 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6773
6774 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6775 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6776 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6777 IEM_MC_REF_EFLAGS(pEFlags);
6778 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6779
6780 IEM_MC_ADVANCE_RIP();
6781 IEM_MC_END();
6782 return VINF_SUCCESS;
6783
6784 case IEMMODE_32BIT:
6785 IEM_MC_BEGIN(3, 0);
6786 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6787 IEM_MC_ARG(uint32_t, u32Src, 1);
6788 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6789
6790 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6791 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6792 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6793 IEM_MC_REF_EFLAGS(pEFlags);
6794 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6795
6796 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6797 IEM_MC_ADVANCE_RIP();
6798 IEM_MC_END();
6799 return VINF_SUCCESS;
6800
6801 case IEMMODE_64BIT:
6802 IEM_MC_BEGIN(3, 0);
6803 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6804 IEM_MC_ARG(uint64_t, u64Src, 1);
6805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6806
6807 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6808 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6809 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6810 IEM_MC_REF_EFLAGS(pEFlags);
6811 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6812
6813 IEM_MC_ADVANCE_RIP();
6814 IEM_MC_END();
6815 return VINF_SUCCESS;
6816
6817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6818 }
6819 }
6820 else
6821 {
6822 /* memory destination. */
6823
6824 uint32_t fAccess;
6825 if (pImpl->pfnLockedU16)
6826 fAccess = IEM_ACCESS_DATA_RW;
6827 else /* BT */
6828 fAccess = IEM_ACCESS_DATA_R;
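 /* Only BT leaves the operand untouched; BTS/BTR/BTC need a read-modify-
    write mapping and allow LOCK, which is why the check above keys off
    the presence of a locked worker. */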
6829
6830 /** @todo test negative bit offsets! */
6831 switch (pVCpu->iem.s.enmEffOpSize)
6832 {
6833 case IEMMODE_16BIT:
6834 IEM_MC_BEGIN(3, 2);
6835 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6836 IEM_MC_ARG(uint16_t, u16Src, 1);
6837 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6839 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6840
6841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6842 if (pImpl->pfnLockedU16)
6843 IEMOP_HLP_DONE_DECODING();
6844 else
6845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6846 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
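 /* For the memory form the register operand is a signed bit offset
    relative to GCPtrEffDst: the low 4 bits select the bit within a
    16-bit word, while the remaining (arithmetically shifted) bits form
    a signed word index that is scaled by 2 into a byte adjustment of
    the effective address.  The 32-bit and 64-bit cases below do the
    same with 5/6 index bits and 4/8 byte scaling. */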
6847 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6848 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6849 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6850 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6851 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6852 IEM_MC_FETCH_EFLAGS(EFlags);
6853
6854 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6855 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6856 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6857 else
6858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6859 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6860
6861 IEM_MC_COMMIT_EFLAGS(EFlags);
6862 IEM_MC_ADVANCE_RIP();
6863 IEM_MC_END();
6864 return VINF_SUCCESS;
6865
6866 case IEMMODE_32BIT:
6867 IEM_MC_BEGIN(3, 2);
6868 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6869 IEM_MC_ARG(uint32_t, u32Src, 1);
6870 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6872 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6873
6874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6875 if (pImpl->pfnLockedU16)
6876 IEMOP_HLP_DONE_DECODING();
6877 else
6878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6879 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6880 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6881 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6882 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6883 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6884 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6885 IEM_MC_FETCH_EFLAGS(EFlags);
6886
6887 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6889 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6890 else
6891 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6892 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6893
6894 IEM_MC_COMMIT_EFLAGS(EFlags);
6895 IEM_MC_ADVANCE_RIP();
6896 IEM_MC_END();
6897 return VINF_SUCCESS;
6898
6899 case IEMMODE_64BIT:
6900 IEM_MC_BEGIN(3, 2);
6901 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6902 IEM_MC_ARG(uint64_t, u64Src, 1);
6903 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6905 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6906
6907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6908 if (pImpl->pfnLockedU16)
6909 IEMOP_HLP_DONE_DECODING();
6910 else
6911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6912 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6913 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6914 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6915 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6916 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6917 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6918 IEM_MC_FETCH_EFLAGS(EFlags);
6919
6920 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6921 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6923 else
6924 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6925 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6926
6927 IEM_MC_COMMIT_EFLAGS(EFlags);
6928 IEM_MC_ADVANCE_RIP();
6929 IEM_MC_END();
6930 return VINF_SUCCESS;
6931
6932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6933 }
6934 }
6935}
6936
6937
6938/** Opcode 0x0f 0xa3. */
6939FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6940{
6941 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6942 IEMOP_HLP_MIN_386();
6943 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6944}
6945
6946
6947/**
6948 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6949 */
6950FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6951{
6952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6953 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6954
6955 if (IEM_IS_MODRM_REG_MODE(bRm))
6956 {
6957 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6959
6960 switch (pVCpu->iem.s.enmEffOpSize)
6961 {
6962 case IEMMODE_16BIT:
6963 IEM_MC_BEGIN(4, 0);
6964 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6965 IEM_MC_ARG(uint16_t, u16Src, 1);
6966 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6967 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6968
6969 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6970 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6971 IEM_MC_REF_EFLAGS(pEFlags);
6972 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6973
6974 IEM_MC_ADVANCE_RIP();
6975 IEM_MC_END();
6976 return VINF_SUCCESS;
6977
6978 case IEMMODE_32BIT:
6979 IEM_MC_BEGIN(4, 0);
6980 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6981 IEM_MC_ARG(uint32_t, u32Src, 1);
6982 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6983 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6984
6985 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6986 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6987 IEM_MC_REF_EFLAGS(pEFlags);
6988 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6989
6990 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6991 IEM_MC_ADVANCE_RIP();
6992 IEM_MC_END();
6993 return VINF_SUCCESS;
6994
6995 case IEMMODE_64BIT:
6996 IEM_MC_BEGIN(4, 0);
6997 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6998 IEM_MC_ARG(uint64_t, u64Src, 1);
6999 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7000 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7001
7002 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7003 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7004 IEM_MC_REF_EFLAGS(pEFlags);
7005 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7006
7007 IEM_MC_ADVANCE_RIP();
7008 IEM_MC_END();
7009 return VINF_SUCCESS;
7010
7011 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7012 }
7013 }
7014 else
7015 {
7016 switch (pVCpu->iem.s.enmEffOpSize)
7017 {
7018 case IEMMODE_16BIT:
7019 IEM_MC_BEGIN(4, 2);
7020 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7021 IEM_MC_ARG(uint16_t, u16Src, 1);
7022 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7023 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7025
7026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
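 /* The trailing 1 tells the effective address calculation that one
    immediate byte still follows the ModR/M encoding, which matters for
    RIP-relative addressing in 64-bit mode. */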
7027 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7028 IEM_MC_ASSIGN(cShiftArg, cShift);
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7031 IEM_MC_FETCH_EFLAGS(EFlags);
7032 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7033 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7034
7035 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7036 IEM_MC_COMMIT_EFLAGS(EFlags);
7037 IEM_MC_ADVANCE_RIP();
7038 IEM_MC_END();
7039 return VINF_SUCCESS;
7040
7041 case IEMMODE_32BIT:
7042 IEM_MC_BEGIN(4, 2);
7043 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7044 IEM_MC_ARG(uint32_t, u32Src, 1);
7045 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7046 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7048
7049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7050 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7051 IEM_MC_ASSIGN(cShiftArg, cShift);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7054 IEM_MC_FETCH_EFLAGS(EFlags);
7055 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7056 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7057
7058 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7059 IEM_MC_COMMIT_EFLAGS(EFlags);
7060 IEM_MC_ADVANCE_RIP();
7061 IEM_MC_END();
7062 return VINF_SUCCESS;
7063
7064 case IEMMODE_64BIT:
7065 IEM_MC_BEGIN(4, 2);
7066 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7067 IEM_MC_ARG(uint64_t, u64Src, 1);
7068 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7071
7072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7073 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7074 IEM_MC_ASSIGN(cShiftArg, cShift);
7075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7076 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7077 IEM_MC_FETCH_EFLAGS(EFlags);
7078 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7079 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7080
7081 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7082 IEM_MC_COMMIT_EFLAGS(EFlags);
7083 IEM_MC_ADVANCE_RIP();
7084 IEM_MC_END();
7085 return VINF_SUCCESS;
7086
7087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7088 }
7089 }
7090}
7091
7092
7093/**
7094 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7095 */
7096FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7097{
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7100
7101 if (IEM_IS_MODRM_REG_MODE(bRm))
7102 {
7103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7104
7105 switch (pVCpu->iem.s.enmEffOpSize)
7106 {
7107 case IEMMODE_16BIT:
7108 IEM_MC_BEGIN(4, 0);
7109 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7110 IEM_MC_ARG(uint16_t, u16Src, 1);
7111 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7112 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7113
7114 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7116 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7117 IEM_MC_REF_EFLAGS(pEFlags);
7118 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7119
7120 IEM_MC_ADVANCE_RIP();
7121 IEM_MC_END();
7122 return VINF_SUCCESS;
7123
7124 case IEMMODE_32BIT:
7125 IEM_MC_BEGIN(4, 0);
7126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7127 IEM_MC_ARG(uint32_t, u32Src, 1);
7128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7129 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7130
7131 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7132 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7133 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7134 IEM_MC_REF_EFLAGS(pEFlags);
7135 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7136
7137 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7138 IEM_MC_ADVANCE_RIP();
7139 IEM_MC_END();
7140 return VINF_SUCCESS;
7141
7142 case IEMMODE_64BIT:
7143 IEM_MC_BEGIN(4, 0);
7144 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7145 IEM_MC_ARG(uint64_t, u64Src, 1);
7146 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7148
7149 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7150 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7151 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7152 IEM_MC_REF_EFLAGS(pEFlags);
7153 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7154
7155 IEM_MC_ADVANCE_RIP();
7156 IEM_MC_END();
7157 return VINF_SUCCESS;
7158
7159 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7160 }
7161 }
7162 else
7163 {
7164 switch (pVCpu->iem.s.enmEffOpSize)
7165 {
7166 case IEMMODE_16BIT:
7167 IEM_MC_BEGIN(4, 2);
7168 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7169 IEM_MC_ARG(uint16_t, u16Src, 1);
7170 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7171 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7173
7174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7176 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7177 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7178 IEM_MC_FETCH_EFLAGS(EFlags);
7179 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7180 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7181
7182 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7183 IEM_MC_COMMIT_EFLAGS(EFlags);
7184 IEM_MC_ADVANCE_RIP();
7185 IEM_MC_END();
7186 return VINF_SUCCESS;
7187
7188 case IEMMODE_32BIT:
7189 IEM_MC_BEGIN(4, 2);
7190 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7191 IEM_MC_ARG(uint32_t, u32Src, 1);
7192 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7193 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7195
7196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7198 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7200 IEM_MC_FETCH_EFLAGS(EFlags);
7201 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7202 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7203
7204 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7205 IEM_MC_COMMIT_EFLAGS(EFlags);
7206 IEM_MC_ADVANCE_RIP();
7207 IEM_MC_END();
7208 return VINF_SUCCESS;
7209
7210 case IEMMODE_64BIT:
7211 IEM_MC_BEGIN(4, 2);
7212 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7213 IEM_MC_ARG(uint64_t, u64Src, 1);
7214 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7217
7218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7220 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7221 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7222 IEM_MC_FETCH_EFLAGS(EFlags);
7223 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7224 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7225
7226 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7227 IEM_MC_COMMIT_EFLAGS(EFlags);
7228 IEM_MC_ADVANCE_RIP();
7229 IEM_MC_END();
7230 return VINF_SUCCESS;
7231
7232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7233 }
7234 }
7235}
7236
7237
7238
7239/** Opcode 0x0f 0xa4. */
7240FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7241{
7242 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7243 IEMOP_HLP_MIN_386();
7244 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7245}
7246
7247
7248/** Opcode 0x0f 0xa5. */
7249FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7250{
7251 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7252 IEMOP_HLP_MIN_386();
7253 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7254}
7255
7256
7257/** Opcode 0x0f 0xa8. */
7258FNIEMOP_DEF(iemOp_push_gs)
7259{
7260 IEMOP_MNEMONIC(push_gs, "push gs");
7261 IEMOP_HLP_MIN_386();
7262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7263 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7264}
7265
7266
7267/** Opcode 0x0f 0xa9. */
7268FNIEMOP_DEF(iemOp_pop_gs)
7269{
7270 IEMOP_MNEMONIC(pop_gs, "pop gs");
7271 IEMOP_HLP_MIN_386();
7272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7273 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7274}
7275
7276
7277/** Opcode 0x0f 0xaa. */
7278FNIEMOP_DEF(iemOp_rsm)
7279{
7280 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7281 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7283 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7284}
7285
7286
7287
7288/** Opcode 0x0f 0xab. */
7289FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7290{
7291 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7292 IEMOP_HLP_MIN_386();
7293 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7294}
7295
7296
7297/** Opcode 0x0f 0xac. */
7298FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7299{
7300 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7301 IEMOP_HLP_MIN_386();
7302 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7303}
7304
7305
7306/** Opcode 0x0f 0xad. */
7307FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7308{
7309 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7310 IEMOP_HLP_MIN_386();
7311 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7312}
7313
7314
7315/** Opcode 0x0f 0xae mem/0. */
7316FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7317{
7318 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7319 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7320 return IEMOP_RAISE_INVALID_OPCODE();
7321
7322 IEM_MC_BEGIN(3, 1);
7323 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7324 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7325 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
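 /* The effective operand size is passed along because REX.W selects the
    FXSAVE64 format with 64-bit FPU IP/DP fields; the XSAVE/XRSTOR
    handlers below forward it for the same reason. */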
7326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7328 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7329 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7330 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7331 IEM_MC_END();
7332 return VINF_SUCCESS;
7333}
7334
7335
7336/** Opcode 0x0f 0xae mem/1. */
7337FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7338{
7339 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7340 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7341 return IEMOP_RAISE_INVALID_OPCODE();
7342
7343 IEM_MC_BEGIN(3, 1);
7344 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7345 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7346 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7349 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7350 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7351 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7352 IEM_MC_END();
7353 return VINF_SUCCESS;
7354}
7355
7356
7357/**
7358 * @opmaps grp15
7359 * @opcode !11/2
7360 * @oppfx none
7361 * @opcpuid sse
7362 * @opgroup og_sse_mxcsrsm
7363 * @opxcpttype 5
7364 * @optest op1=0 -> mxcsr=0
7365 * @optest op1=0x2083 -> mxcsr=0x2083
7366 * @optest op1=0xfffffffe -> value.xcpt=0xd
7367 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7368 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7369 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7370 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7371 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7372 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7373 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7374 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7375 */
7376FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7377{
7378 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7379 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7380 return IEMOP_RAISE_INVALID_OPCODE();
7381
7382 IEM_MC_BEGIN(2, 0);
7383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7384 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7387 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr writes MXCSR, so actualize the SSE state for modification. */
7388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7389 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7390 IEM_MC_END();
7391 return VINF_SUCCESS;
7392}
7393
7394
7395/**
7396 * @opmaps grp15
7397 * @opcode !11/3
7398 * @oppfx none
7399 * @opcpuid sse
7400 * @opgroup og_sse_mxcsrsm
7401 * @opxcpttype 5
7402 * @optest mxcsr=0 -> op1=0
7403 * @optest mxcsr=0x2083 -> op1=0x2083
7404 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7405 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7406 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7407 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7408 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7409 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7410 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7411 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7412 */
7413FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7414{
7415 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7417 return IEMOP_RAISE_INVALID_OPCODE();
7418
7419 IEM_MC_BEGIN(2, 0);
7420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7424 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7426 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7427 IEM_MC_END();
7428 return VINF_SUCCESS;
7429}
7430
7431
7432/**
7433 * @opmaps grp15
7434 * @opcode !11/4
7435 * @oppfx none
7436 * @opcpuid xsave
7437 * @opgroup og_system
7438 * @opxcpttype none
7439 */
7440FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7441{
7442 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7443 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7444 return IEMOP_RAISE_INVALID_OPCODE();
7445
7446 IEM_MC_BEGIN(3, 0);
7447 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7448 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7449 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7452 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7453 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7454 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7455 IEM_MC_END();
7456 return VINF_SUCCESS;
7457}
7458
7459
7460/**
7461 * @opmaps grp15
7462 * @opcode !11/5
7463 * @oppfx none
7464 * @opcpuid xsave
7465 * @opgroup og_system
7466 * @opxcpttype none
7467 */
7468FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7469{
7470 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7471 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7472 return IEMOP_RAISE_INVALID_OPCODE();
7473
7474 IEM_MC_BEGIN(3, 0);
7475 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7476 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7477 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7480 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7481 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7482 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7483 IEM_MC_END();
7484 return VINF_SUCCESS;
7485}
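
/* Note (illustrative): architecturally XSAVE/XRSTOR take the requested
 * feature bitmap in EDX:EAX, e.g. "mov eax, 7" / "xor edx, edx" / "xsave
 * [mem]" requests x87, SSE and AVX state; that register pair is presumably
 * consumed by the iemCImpl_xsave/iemCImpl_xrstor workers rather than by the
 * decoding above, which only supplies the effective address and operand
 * size. */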
7486
7487/** Opcode 0x0f 0xae mem/6. */
7488FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7489
7490/**
7491 * @opmaps grp15
7492 * @opcode !11/7
7493 * @oppfx none
7494 * @opcpuid clfsh
7495 * @opgroup og_cachectl
7496 * @optest op1=1 ->
7497 */
7498FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7499{
7500 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7501 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7502 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7503
7504 IEM_MC_BEGIN(2, 0);
7505 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7506 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7509 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7510 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7511 IEM_MC_END();
7512 return VINF_SUCCESS;
7513}
7514
7515/**
7516 * @opmaps grp15
7517 * @opcode !11/7
7518 * @oppfx 0x66
7519 * @opcpuid clflushopt
7520 * @opgroup og_cachectl
7521 * @optest op1=1 ->
7522 */
7523FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7524{
7525 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7526 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7527 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7528
7529 IEM_MC_BEGIN(2, 0);
7530 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7531 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7535 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7536 IEM_MC_END();
7537 return VINF_SUCCESS;
7538}
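
/* Usage sketch (illustration): both encodings flush the cache line holding
 * the byte at the effective address, e.g. "clflush [rbx]" or "clflushopt
 * [rbx]"; they can share iemCImpl_clflush_clflushopt above because the
 * architectural difference is only in store-ordering guarantees. */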
7539
7540
7541/** Opcode 0x0f 0xae 11b/5. */
7542FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7543{
7544 RT_NOREF_PV(bRm);
7545 IEMOP_MNEMONIC(lfence, "lfence");
7546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7547 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7548 return IEMOP_RAISE_INVALID_OPCODE();
7549
7550 IEM_MC_BEGIN(0, 0);
7551#ifndef RT_ARCH_ARM64
7552 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7553#endif
7554 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7555#ifndef RT_ARCH_ARM64
7556 else
7557 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7558#endif
7559 IEM_MC_ADVANCE_RIP();
7560 IEM_MC_END();
7561 return VINF_SUCCESS;
7562}
7563
7564
7565/** Opcode 0x0f 0xae 11b/6. */
7566FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7567{
7568 RT_NOREF_PV(bRm);
7569 IEMOP_MNEMONIC(mfence, "mfence");
7570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7571 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573
7574 IEM_MC_BEGIN(0, 0);
7575#ifndef RT_ARCH_ARM64
7576 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7577#endif
7578 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7579#ifndef RT_ARCH_ARM64
7580 else
7581 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7582#endif
7583 IEM_MC_ADVANCE_RIP();
7584 IEM_MC_END();
7585 return VINF_SUCCESS;
7586}
7587
7588
7589/** Opcode 0x0f 0xae 11b/7. */
7590FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7591{
7592 RT_NOREF_PV(bRm);
7593 IEMOP_MNEMONIC(sfence, "sfence");
7594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7595 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7596 return IEMOP_RAISE_INVALID_OPCODE();
7597
7598 IEM_MC_BEGIN(0, 0);
7599#ifndef RT_ARCH_ARM64
7600 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7601#endif
7602 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7603#ifndef RT_ARCH_ARM64
7604 else
7605 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7606#endif
7607 IEM_MC_ADVANCE_RIP();
7608 IEM_MC_END();
7609 return VINF_SUCCESS;
7610}
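
/* Usage sketch (illustration): guests typically pair non-temporal stores
 * with sfence, e.g.
 *      movnti  [rdi], eax
 *      sfence                      ; order the NT store before later stores
 * On hosts without SSE2 and on non-x86 hosts, the iemAImpl_alt_mem_fence
 * fallback above is assumed to supply an at least as strong barrier. */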
7611
7612
7613/** Opcode 0xf3 0x0f 0xae 11b/0. */
7614FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7615{
7616 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7619 {
7620 IEM_MC_BEGIN(1, 0);
7621 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7622 IEM_MC_ARG(uint64_t, u64Dst, 0);
7623 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7624 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7625 IEM_MC_ADVANCE_RIP();
7626 IEM_MC_END();
7627 }
7628 else
7629 {
7630 IEM_MC_BEGIN(1, 0);
7631 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7632 IEM_MC_ARG(uint32_t, u32Dst, 0);
7633 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7634 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7635 IEM_MC_ADVANCE_RIP();
7636 IEM_MC_END();
7637 }
7638 return VINF_SUCCESS;
7639}
7640
7641
7642/** Opcode 0xf3 0x0f 0xae 11b/1. */
7643FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7644{
7645 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7647 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7648 {
7649 IEM_MC_BEGIN(1, 0);
7650 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7651 IEM_MC_ARG(uint64_t, u64Dst, 0);
7652 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7653 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7654 IEM_MC_ADVANCE_RIP();
7655 IEM_MC_END();
7656 }
7657 else
7658 {
7659 IEM_MC_BEGIN(1, 0);
7660 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7661 IEM_MC_ARG(uint32_t, u32Dst, 0);
7662 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7663 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7664 IEM_MC_ADVANCE_RIP();
7665 IEM_MC_END();
7666 }
7667 return VINF_SUCCESS;
7668}
7669
7670
7671/** Opcode 0xf3 0x0f 0xae 11b/2. */
7672FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7673{
7674 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7677 {
7678 IEM_MC_BEGIN(1, 0);
7679 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7680 IEM_MC_ARG(uint64_t, u64Dst, 0);
7681 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7682 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7683 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7684 IEM_MC_ADVANCE_RIP();
7685 IEM_MC_END();
7686 }
7687 else
7688 {
7689 IEM_MC_BEGIN(1, 0);
7690 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7691 IEM_MC_ARG(uint32_t, u32Dst, 0);
7692 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7693 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7694 IEM_MC_ADVANCE_RIP();
7695 IEM_MC_END();
7696 }
7697 return VINF_SUCCESS;
7698}
7699
7700
7701/** Opcode 0xf3 0x0f 0xae 11b/3. */
7702FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7703{
7704 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7706 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7707 {
7708 IEM_MC_BEGIN(1, 0);
7709 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7710 IEM_MC_ARG(uint64_t, u64Dst, 0);
7711 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7712 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7713 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7714 IEM_MC_ADVANCE_RIP();
7715 IEM_MC_END();
7716 }
7717 else
7718 {
7719 IEM_MC_BEGIN(1, 0);
7720 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7721 IEM_MC_ARG(uint32_t, u32Dst, 0);
7722 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7723 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7724 IEM_MC_ADVANCE_RIP();
7725 IEM_MC_END();
7726 }
7727 return VINF_SUCCESS;
7728}
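
/* Example (illustration): in 64-bit mode "wrfsbase rax" loads FS.base
 * straight from RAX (subject to the canonical-address check above) and
 * "rdfsbase rax" reads it back; the 32-bit forms zero-extend into the
 * base, hence the IEM_MC_STORE_SREG_BASE_U64 with a 32-bit source. */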
7729
7730
7731/**
7732 * Group 15 jump table for register variant.
7733 */
7734IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7735{ /* pfx: none, 066h, 0f3h, 0f2h */
7736 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7737 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7738 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7739 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7740 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7741 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7742 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7743 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7744};
7745AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7746
7747
7748/**
7749 * Group 15 jump table for memory variant.
7750 */
7751IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7752{ /* pfx: none, 066h, 0f3h, 0f2h */
7753 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7754 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7755 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7756 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7757 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7758 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7759 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7760 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7761};
7762AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7763
7764
7765/** Opcode 0x0f 0xae. */
7766FNIEMOP_DEF(iemOp_Grp15)
7767{
7768 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7770 if (IEM_IS_MODRM_REG_MODE(bRm))
7771 /* register, register */
7772 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7773 + pVCpu->iem.s.idxPrefix], bRm);
7774 /* memory, register */
7775 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7776 + pVCpu->iem.s.idxPrefix], bRm);
7777}
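
/* Dispatch sketch (illustration): the table index is reg*4 + prefix. For
 * bRm=0e8h (mod=11b, reg=5) with no prefix, the reg/reg slot 5*4+0 =
 * iemOp_Grp15_lfence is invoked, while the same reg with a 0f3h prefix
 * lands on slot 5*4+2 = iemOp_InvalidWithRM. */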
7778
7779
7780/** Opcode 0x0f 0xaf. */
7781FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7782{
7783 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7784 IEMOP_HLP_MIN_386();
7785 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7786 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
7787}
7788
7789
7790/** Opcode 0x0f 0xb0. */
7791FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7792{
7793 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7794 IEMOP_HLP_MIN_486();
7795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7796
7797 if (IEM_IS_MODRM_REG_MODE(bRm))
7798 {
7799 IEMOP_HLP_DONE_DECODING();
7800 IEM_MC_BEGIN(4, 0);
7801 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7802 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7803 IEM_MC_ARG(uint8_t, u8Src, 2);
7804 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7805
7806 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7807 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7808 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7809 IEM_MC_REF_EFLAGS(pEFlags);
7810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7812 else
7813 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7814
7815 IEM_MC_ADVANCE_RIP();
7816 IEM_MC_END();
7817 }
7818 else
7819 {
7820 IEM_MC_BEGIN(4, 3);
7821 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7822 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7823 IEM_MC_ARG(uint8_t, u8Src, 2);
7824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7826 IEM_MC_LOCAL(uint8_t, u8Al);
7827
7828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7829 IEMOP_HLP_DONE_DECODING();
7830 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7831 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7832 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7833 IEM_MC_FETCH_EFLAGS(EFlags);
7834 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7835 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7836 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7837 else
7838 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7839
7840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7841 IEM_MC_COMMIT_EFLAGS(EFlags);
7842 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7843 IEM_MC_ADVANCE_RIP();
7844 IEM_MC_END();
7845 }
7846 return VINF_SUCCESS;
7847}
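
/* Semantics example (illustration): with AL=5 and byte [mem]=5, "cmpxchg
 * [mem], bl" sets ZF and stores BL to [mem]; had [mem] held 6, ZF would be
 * cleared and AL loaded with 6 instead. This is why the memory form above
 * always maps the destination for read/write. */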
7848
7849/** Opcode 0x0f 0xb1. */
7850FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7851{
7852 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7853 IEMOP_HLP_MIN_486();
7854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7855
7856 if (IEM_IS_MODRM_REG_MODE(bRm))
7857 {
7858 IEMOP_HLP_DONE_DECODING();
7859 switch (pVCpu->iem.s.enmEffOpSize)
7860 {
7861 case IEMMODE_16BIT:
7862 IEM_MC_BEGIN(4, 0);
7863 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7864 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7865 IEM_MC_ARG(uint16_t, u16Src, 2);
7866 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7867
7868 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7869 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7870 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7871 IEM_MC_REF_EFLAGS(pEFlags);
7872 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7873 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7874 else
7875 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7876
7877 IEM_MC_ADVANCE_RIP();
7878 IEM_MC_END();
7879 return VINF_SUCCESS;
7880
7881 case IEMMODE_32BIT:
7882 IEM_MC_BEGIN(4, 0);
7883 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7884 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7885 IEM_MC_ARG(uint32_t, u32Src, 2);
7886 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7887
7888 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7889 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7890 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7891 IEM_MC_REF_EFLAGS(pEFlags);
7892 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7893 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7894 else
7895 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7896
7897 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7898 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7899 IEM_MC_ADVANCE_RIP();
7900 IEM_MC_END();
7901 return VINF_SUCCESS;
7902
7903 case IEMMODE_64BIT:
7904 IEM_MC_BEGIN(4, 0);
7905 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7906 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7907#ifdef RT_ARCH_X86
7908 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7909#else
7910 IEM_MC_ARG(uint64_t, u64Src, 2);
7911#endif
7912 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7913
7914 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7915 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7916 IEM_MC_REF_EFLAGS(pEFlags);
7917#ifdef RT_ARCH_X86
7918 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7919 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7920 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7921 else
7922 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7923#else
7924 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7925 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7926 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7927 else
7928 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7929#endif
7930
7931 IEM_MC_ADVANCE_RIP();
7932 IEM_MC_END();
7933 return VINF_SUCCESS;
7934
7935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7936 }
7937 }
7938 else
7939 {
7940 switch (pVCpu->iem.s.enmEffOpSize)
7941 {
7942 case IEMMODE_16BIT:
7943 IEM_MC_BEGIN(4, 3);
7944 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7945 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7946 IEM_MC_ARG(uint16_t, u16Src, 2);
7947 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7949 IEM_MC_LOCAL(uint16_t, u16Ax);
7950
7951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7952 IEMOP_HLP_DONE_DECODING();
7953 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7954 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7955 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7956 IEM_MC_FETCH_EFLAGS(EFlags);
7957 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7958 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7959 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7960 else
7961 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7962
7963 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7964 IEM_MC_COMMIT_EFLAGS(EFlags);
7965 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7966 IEM_MC_ADVANCE_RIP();
7967 IEM_MC_END();
7968 return VINF_SUCCESS;
7969
7970 case IEMMODE_32BIT:
7971 IEM_MC_BEGIN(4, 3);
7972 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7973 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7974 IEM_MC_ARG(uint32_t, u32Src, 2);
7975 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7977 IEM_MC_LOCAL(uint32_t, u32Eax);
7978
7979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7980 IEMOP_HLP_DONE_DECODING();
7981 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7982 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7983 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7984 IEM_MC_FETCH_EFLAGS(EFlags);
7985 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7987 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7988 else
7989 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7990
7991 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7992 IEM_MC_COMMIT_EFLAGS(EFlags);
7993 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7994 IEM_MC_ADVANCE_RIP();
7995 IEM_MC_END();
7996 return VINF_SUCCESS;
7997
7998 case IEMMODE_64BIT:
7999 IEM_MC_BEGIN(4, 3);
8000 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8001 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8002#ifdef RT_ARCH_X86
8003 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8004#else
8005 IEM_MC_ARG(uint64_t, u64Src, 2);
8006#endif
8007 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8009 IEM_MC_LOCAL(uint64_t, u64Rax);
8010
8011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8012 IEMOP_HLP_DONE_DECODING();
8013 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8014 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8015 IEM_MC_FETCH_EFLAGS(EFlags);
8016 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8017#ifdef RT_ARCH_X86
8018 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8019 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8021 else
8022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8023#else
8024 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8025 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8026 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8027 else
8028 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8029#endif
8030
8031 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8032 IEM_MC_COMMIT_EFLAGS(EFlags);
8033 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8034 IEM_MC_ADVANCE_RIP();
8035 IEM_MC_END();
8036 return VINF_SUCCESS;
8037
8038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8039 }
8040 }
8041}
8042
8043
8044FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8045{
8046 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8047 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
8048
8049 switch (pVCpu->iem.s.enmEffOpSize)
8050 {
8051 case IEMMODE_16BIT:
8052 IEM_MC_BEGIN(5, 1);
8053 IEM_MC_ARG(uint16_t, uSel, 0);
8054 IEM_MC_ARG(uint16_t, offSeg, 1);
8055 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8056 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8057 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8058 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8061 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8062 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8063 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8064 IEM_MC_END();
8065 return VINF_SUCCESS;
8066
8067 case IEMMODE_32BIT:
8068 IEM_MC_BEGIN(5, 1);
8069 IEM_MC_ARG(uint16_t, uSel, 0);
8070 IEM_MC_ARG(uint32_t, offSeg, 1);
8071 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8072 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8073 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8074 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8077 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8078 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8079 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8080 IEM_MC_END();
8081 return VINF_SUCCESS;
8082
8083 case IEMMODE_64BIT:
8084 IEM_MC_BEGIN(5, 1);
8085 IEM_MC_ARG(uint16_t, uSel, 0);
8086 IEM_MC_ARG(uint64_t, offSeg, 1);
8087 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8088 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8089 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8090 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8093 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
8094 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8095 else
8096 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8097 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8098 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8099 IEM_MC_END();
8100 return VINF_SUCCESS;
8101
8102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8103 }
8104}
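
/* Memory layout sketch (illustration): for "lss esp, [ebx]" with a 32-bit
 * operand size, the worker above reads the 32-bit offset from [ebx] and the
 * 16-bit SS selector from [ebx+4], matching the m16:32 far pointer format;
 * the 16-bit and 64-bit variants use selector displacements 2 and 8. */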
8105
8106
8107/** Opcode 0x0f 0xb2. */
8108FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8109{
8110 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8111 IEMOP_HLP_MIN_386();
8112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8113 if (IEM_IS_MODRM_REG_MODE(bRm))
8114 return IEMOP_RAISE_INVALID_OPCODE();
8115 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8116}
8117
8118
8119/** Opcode 0x0f 0xb3. */
8120FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8121{
8122 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8123 IEMOP_HLP_MIN_386();
8124 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8125}
8126
8127
8128/** Opcode 0x0f 0xb4. */
8129FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8130{
8131 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8132 IEMOP_HLP_MIN_386();
8133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8134 if (IEM_IS_MODRM_REG_MODE(bRm))
8135 return IEMOP_RAISE_INVALID_OPCODE();
8136 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8137}
8138
8139
8140/** Opcode 0x0f 0xb5. */
8141FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8142{
8143 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8144 IEMOP_HLP_MIN_386();
8145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8146 if (IEM_IS_MODRM_REG_MODE(bRm))
8147 return IEMOP_RAISE_INVALID_OPCODE();
8148 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8149}
8150
8151
8152/** Opcode 0x0f 0xb6. */
8153FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8154{
8155 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8156 IEMOP_HLP_MIN_386();
8157
8158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8159
8160 /*
8161 * If rm is denoting a register, no more instruction bytes.
8162 */
8163 if (IEM_IS_MODRM_REG_MODE(bRm))
8164 {
8165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8166 switch (pVCpu->iem.s.enmEffOpSize)
8167 {
8168 case IEMMODE_16BIT:
8169 IEM_MC_BEGIN(0, 1);
8170 IEM_MC_LOCAL(uint16_t, u16Value);
8171 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8172 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8173 IEM_MC_ADVANCE_RIP();
8174 IEM_MC_END();
8175 return VINF_SUCCESS;
8176
8177 case IEMMODE_32BIT:
8178 IEM_MC_BEGIN(0, 1);
8179 IEM_MC_LOCAL(uint32_t, u32Value);
8180 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8181 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8182 IEM_MC_ADVANCE_RIP();
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185
8186 case IEMMODE_64BIT:
8187 IEM_MC_BEGIN(0, 1);
8188 IEM_MC_LOCAL(uint64_t, u64Value);
8189 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8190 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8191 IEM_MC_ADVANCE_RIP();
8192 IEM_MC_END();
8193 return VINF_SUCCESS;
8194
8195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8196 }
8197 }
8198 else
8199 {
8200 /*
8201 * We're loading a register from memory.
8202 */
8203 switch (pVCpu->iem.s.enmEffOpSize)
8204 {
8205 case IEMMODE_16BIT:
8206 IEM_MC_BEGIN(0, 2);
8207 IEM_MC_LOCAL(uint16_t, u16Value);
8208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8211 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8212 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8213 IEM_MC_ADVANCE_RIP();
8214 IEM_MC_END();
8215 return VINF_SUCCESS;
8216
8217 case IEMMODE_32BIT:
8218 IEM_MC_BEGIN(0, 2);
8219 IEM_MC_LOCAL(uint32_t, u32Value);
8220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8223 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8224 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8225 IEM_MC_ADVANCE_RIP();
8226 IEM_MC_END();
8227 return VINF_SUCCESS;
8228
8229 case IEMMODE_64BIT:
8230 IEM_MC_BEGIN(0, 2);
8231 IEM_MC_LOCAL(uint64_t, u64Value);
8232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8235 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8236 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8237 IEM_MC_ADVANCE_RIP();
8238 IEM_MC_END();
8239 return VINF_SUCCESS;
8240
8241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8242 }
8243 }
8244}
8245
8246
8247/** Opcode 0x0f 0xb7. */
8248FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
8249{
8250 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
8251 IEMOP_HLP_MIN_386();
8252
8253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8254
8255 /** @todo Not entirely sure how the operand size prefix is handled here,
8256 * assuming that it will be ignored. Would be nice to have a few
8257 * tests for this. */
8258 /*
8259 * If rm is denoting a register, no more instruction bytes.
8260 */
8261 if (IEM_IS_MODRM_REG_MODE(bRm))
8262 {
8263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8264 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8265 {
8266 IEM_MC_BEGIN(0, 1);
8267 IEM_MC_LOCAL(uint32_t, u32Value);
8268 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8269 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8270 IEM_MC_ADVANCE_RIP();
8271 IEM_MC_END();
8272 }
8273 else
8274 {
8275 IEM_MC_BEGIN(0, 1);
8276 IEM_MC_LOCAL(uint64_t, u64Value);
8277 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8278 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8279 IEM_MC_ADVANCE_RIP();
8280 IEM_MC_END();
8281 }
8282 }
8283 else
8284 {
8285 /*
8286 * We're loading a register from memory.
8287 */
8288 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8289 {
8290 IEM_MC_BEGIN(0, 2);
8291 IEM_MC_LOCAL(uint32_t, u32Value);
8292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8295 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8296 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8297 IEM_MC_ADVANCE_RIP();
8298 IEM_MC_END();
8299 }
8300 else
8301 {
8302 IEM_MC_BEGIN(0, 2);
8303 IEM_MC_LOCAL(uint64_t, u64Value);
8304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8307 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8308 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8309 IEM_MC_ADVANCE_RIP();
8310 IEM_MC_END();
8311 }
8312 }
8313 return VINF_SUCCESS;
8314}
8315
8316
8317/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
8318FNIEMOP_UD_STUB(iemOp_jmpe);
8319
8320
8321/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
8322FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
8323{
8324 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8325 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
8326 return iemOp_InvalidNeedRM(pVCpu);
8327#ifndef TST_IEM_CHECK_MC
8328# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
8329 static const IEMOPBINSIZES s_Native =
8330 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
8331# endif
8332 static const IEMOPBINSIZES s_Fallback =
8333 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
8334#endif
8335 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
8336}
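
/* Example (illustration): "popcnt eax, ebx" with EBX=0f0f0h yields EAX=8
 * and clears ZF; with EBX=0 it yields EAX=0 and sets ZF (the remaining
 * arithmetic flags are cleared in both cases). */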
8337
8338
8339/**
8340 * @opcode 0xb9
8341 * @opinvalid intel-modrm
8342 * @optest ->
8343 */
8344FNIEMOP_DEF(iemOp_Grp10)
8345{
8346 /*
8347 * AMD does not decode beyond the 0xb9 whereas intel decodes the modr/m byte
8348 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
8349 */
8350 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
8351 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
8352 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
8353}
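
/* Decode-length sketch (illustration): for the byte sequence 0f b9 04 25
 * xx xx xx xx, an intel CPU consumes the modr/m byte, SIB and displacement
 * before raising #UD, so the reported instruction length differs from
 * AMD's bare two bytes; forwarding to iemOp_InvalidNeedRM reproduces the
 * intel behaviour. */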
8354
8355
8356/** Opcode 0x0f 0xba. */
8357FNIEMOP_DEF(iemOp_Grp8)
8358{
8359 IEMOP_HLP_MIN_386();
8360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8361 PCIEMOPBINSIZES pImpl;
8362 switch (IEM_GET_MODRM_REG_8(bRm))
8363 {
8364 case 0: case 1: case 2: case 3:
8365 /* Both AMD and Intel want full modr/m decoding and imm8. */
8366 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
8367 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
8368 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
8369 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
8370 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
8371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8372 }
8373 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8374
8375 if (IEM_IS_MODRM_REG_MODE(bRm))
8376 {
8377 /* register destination. */
8378 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8380
8381 switch (pVCpu->iem.s.enmEffOpSize)
8382 {
8383 case IEMMODE_16BIT:
8384 IEM_MC_BEGIN(3, 0);
8385 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8386 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
8387 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8388
8389 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8390 IEM_MC_REF_EFLAGS(pEFlags);
8391 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8392
8393 IEM_MC_ADVANCE_RIP();
8394 IEM_MC_END();
8395 return VINF_SUCCESS;
8396
8397 case IEMMODE_32BIT:
8398 IEM_MC_BEGIN(3, 0);
8399 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8400 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
8401 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8402
8403 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8404 IEM_MC_REF_EFLAGS(pEFlags);
8405 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8406
8407 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8408 IEM_MC_ADVANCE_RIP();
8409 IEM_MC_END();
8410 return VINF_SUCCESS;
8411
8412 case IEMMODE_64BIT:
8413 IEM_MC_BEGIN(3, 0);
8414 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8415 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8416 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8417
8418 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8419 IEM_MC_REF_EFLAGS(pEFlags);
8420 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8421
8422 IEM_MC_ADVANCE_RIP();
8423 IEM_MC_END();
8424 return VINF_SUCCESS;
8425
8426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8427 }
8428 }
8429 else
8430 {
8431 /* memory destination. */
8432
8433 uint32_t fAccess;
8434 if (pImpl->pfnLockedU16)
8435 fAccess = IEM_ACCESS_DATA_RW;
8436 else /* BT */
8437 fAccess = IEM_ACCESS_DATA_R;
8438
8439 /** @todo test negative bit offsets! */
8440 switch (pVCpu->iem.s.enmEffOpSize)
8441 {
8442 case IEMMODE_16BIT:
8443 IEM_MC_BEGIN(3, 1);
8444 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8445 IEM_MC_ARG(uint16_t, u16Src, 1);
8446 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8448
8449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8450 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8451 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
8452 if (pImpl->pfnLockedU16)
8453 IEMOP_HLP_DONE_DECODING();
8454 else
8455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8456 IEM_MC_FETCH_EFLAGS(EFlags);
8457 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8458 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8459 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8460 else
8461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8462 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8463
8464 IEM_MC_COMMIT_EFLAGS(EFlags);
8465 IEM_MC_ADVANCE_RIP();
8466 IEM_MC_END();
8467 return VINF_SUCCESS;
8468
8469 case IEMMODE_32BIT:
8470 IEM_MC_BEGIN(3, 1);
8471 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8472 IEM_MC_ARG(uint32_t, u32Src, 1);
8473 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8475
8476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8477 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8478 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8479 if (pImpl->pfnLockedU16)
8480 IEMOP_HLP_DONE_DECODING();
8481 else
8482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483 IEM_MC_FETCH_EFLAGS(EFlags);
8484 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8485 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8486 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8487 else
8488 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8489 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8490
8491 IEM_MC_COMMIT_EFLAGS(EFlags);
8492 IEM_MC_ADVANCE_RIP();
8493 IEM_MC_END();
8494 return VINF_SUCCESS;
8495
8496 case IEMMODE_64BIT:
8497 IEM_MC_BEGIN(3, 1);
8498 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8499 IEM_MC_ARG(uint64_t, u64Src, 1);
8500 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8502
8503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8504 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8505 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8506 if (pImpl->pfnLockedU16)
8507 IEMOP_HLP_DONE_DECODING();
8508 else
8509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8510 IEM_MC_FETCH_EFLAGS(EFlags);
8511 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8512 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8514 else
8515 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8516 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8517
8518 IEM_MC_COMMIT_EFLAGS(EFlags);
8519 IEM_MC_ADVANCE_RIP();
8520 IEM_MC_END();
8521 return VINF_SUCCESS;
8522
8523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8524 }
8525 }
8526}
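
/* Example (illustration): "bt word [mem], 21" masks the immediate to
 * 21 & 15 = 5 and tests bit 5 of the 16-bit operand; unlike the Gv forms
 * (0f a3/ab/b3/bb), the imm8 variant can therefore never address memory
 * beyond the operand itself. */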
8527
8528
8529/** Opcode 0x0f 0xbb. */
8530FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8531{
8532 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8533 IEMOP_HLP_MIN_386();
8534 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8535}
8536
8537
8538/**
8539 * Common worker for BSF and BSR instructions.
8540 *
8541 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8542 * the destination register, which means that for 32-bit operations the high
8543 * bits must be left alone.
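 *
 * For example (an illustrative sketch): with EDX=0x100, "bsf eax, edx"
 * stores 8 and clears the high dword of RAX, while with EDX=0 ZF is set
 * and RAX, as implemented here, is left entirely unchanged; hence the
 * conditional high-dword clearing in the 32-bit cases below.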
8544 *
8545 * @param pImpl Pointer to the instruction implementation (assembly).
8546 */
8547FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8548{
8549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8550
8551 /*
8552 * If rm is denoting a register, no more instruction bytes.
8553 */
8554 if (IEM_IS_MODRM_REG_MODE(bRm))
8555 {
8556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8557 switch (pVCpu->iem.s.enmEffOpSize)
8558 {
8559 case IEMMODE_16BIT:
8560 IEM_MC_BEGIN(3, 0);
8561 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8562 IEM_MC_ARG(uint16_t, u16Src, 1);
8563 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8564
8565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8566 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8567 IEM_MC_REF_EFLAGS(pEFlags);
8568 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8569
8570 IEM_MC_ADVANCE_RIP();
8571 IEM_MC_END();
8572 break;
8573
8574 case IEMMODE_32BIT:
8575 IEM_MC_BEGIN(3, 0);
8576 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8577 IEM_MC_ARG(uint32_t, u32Src, 1);
8578 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8579
8580 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8581 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8582 IEM_MC_REF_EFLAGS(pEFlags);
8583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8584 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8585 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8586 IEM_MC_ENDIF();
8587 IEM_MC_ADVANCE_RIP();
8588 IEM_MC_END();
8589 break;
8590
8591 case IEMMODE_64BIT:
8592 IEM_MC_BEGIN(3, 0);
8593 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8594 IEM_MC_ARG(uint64_t, u64Src, 1);
8595 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8596
8597 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8598 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8599 IEM_MC_REF_EFLAGS(pEFlags);
8600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8601
8602 IEM_MC_ADVANCE_RIP();
8603 IEM_MC_END();
8604 break;
8605 }
8606 }
8607 else
8608 {
8609 /*
8610 * We're accessing memory.
8611 */
8612 switch (pVCpu->iem.s.enmEffOpSize)
8613 {
8614 case IEMMODE_16BIT:
8615 IEM_MC_BEGIN(3, 1);
8616 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8617 IEM_MC_ARG(uint16_t, u16Src, 1);
8618 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8620
8621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8623 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8624 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8625 IEM_MC_REF_EFLAGS(pEFlags);
8626 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8627
8628 IEM_MC_ADVANCE_RIP();
8629 IEM_MC_END();
8630 break;
8631
8632 case IEMMODE_32BIT:
8633 IEM_MC_BEGIN(3, 1);
8634 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8635 IEM_MC_ARG(uint32_t, u32Src, 1);
8636 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8638
8639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8641 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8642 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8643 IEM_MC_REF_EFLAGS(pEFlags);
8644 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8645
8646 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8647 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8648 IEM_MC_ENDIF();
8649 IEM_MC_ADVANCE_RIP();
8650 IEM_MC_END();
8651 break;
8652
8653 case IEMMODE_64BIT:
8654 IEM_MC_BEGIN(3, 1);
8655 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8656 IEM_MC_ARG(uint64_t, u64Src, 1);
8657 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8659
8660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8662 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8663 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8664 IEM_MC_REF_EFLAGS(pEFlags);
8665 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8666
8667 IEM_MC_ADVANCE_RIP();
8668 IEM_MC_END();
8669 break;
8670 }
8671 }
8672 return VINF_SUCCESS;
8673}
8674
8675
8676/** Opcode 0x0f 0xbc. */
8677FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
8678{
8679 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
8680 IEMOP_HLP_MIN_386();
8681 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8682 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
8683}
8684
8685
8686/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
8687FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
8688{
8689 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8690 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
8691 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8692
8693#ifndef TST_IEM_CHECK_MC
8694 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
8695 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
8696 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
8697 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
8698 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
8699 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
8700 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
8701 {
8702 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
8703 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
8704 };
8705#endif
8706 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8707 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8708 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8709}
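
/* Example (illustration): "tzcnt eax, ebx" with EBX=0 yields EAX=32 and
 * sets CF, whereas the same f3 0f bc encoding executed as bsf on a
 * pre-BMI1 CPU sets ZF and, as the bsf worker above implements it, leaves
 * EAX untouched; hence the fBmi1 forward to iemOp_bsf_Gv_Ev. */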
8710
8711
8712/** Opcode 0x0f 0xbd. */
8713FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
8714{
8715 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8716 IEMOP_HLP_MIN_386();
8717 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8718 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
8719}
8720
8721
8722/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8723FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
8724{
8725 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8726 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
8727 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8728
8729#ifndef TST_IEM_CHECK_MC
8730 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
8731 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
8732 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
8733 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
8734 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
8735 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
8736 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
8737 {
8738 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
8739 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
8740 };
8741#endif
8742 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8743 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8744 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8745}
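
/* Example (illustration): "lzcnt eax, ebx" with EBX=1 yields EAX=31; with
 * EBX=0 it yields EAX=32 and sets CF, mirroring the tzcnt/bsf fallback
 * situation above. */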
8746
8747
8748
8749/** Opcode 0x0f 0xbe. */
8750FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8751{
8752 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8753 IEMOP_HLP_MIN_386();
8754
8755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8756
8757 /*
8758 * If rm is denoting a register, no more instruction bytes.
8759 */
8760 if (IEM_IS_MODRM_REG_MODE(bRm))
8761 {
8762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8763 switch (pVCpu->iem.s.enmEffOpSize)
8764 {
8765 case IEMMODE_16BIT:
8766 IEM_MC_BEGIN(0, 1);
8767 IEM_MC_LOCAL(uint16_t, u16Value);
8768 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8769 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8770 IEM_MC_ADVANCE_RIP();
8771 IEM_MC_END();
8772 return VINF_SUCCESS;
8773
8774 case IEMMODE_32BIT:
8775 IEM_MC_BEGIN(0, 1);
8776 IEM_MC_LOCAL(uint32_t, u32Value);
8777 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8778 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8779 IEM_MC_ADVANCE_RIP();
8780 IEM_MC_END();
8781 return VINF_SUCCESS;
8782
8783 case IEMMODE_64BIT:
8784 IEM_MC_BEGIN(0, 1);
8785 IEM_MC_LOCAL(uint64_t, u64Value);
8786 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8787 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8788 IEM_MC_ADVANCE_RIP();
8789 IEM_MC_END();
8790 return VINF_SUCCESS;
8791
8792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8793 }
8794 }
8795 else
8796 {
8797 /*
8798 * We're loading a register from memory.
8799 */
8800 switch (pVCpu->iem.s.enmEffOpSize)
8801 {
8802 case IEMMODE_16BIT:
8803 IEM_MC_BEGIN(0, 2);
8804 IEM_MC_LOCAL(uint16_t, u16Value);
8805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8808 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8809 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8810 IEM_MC_ADVANCE_RIP();
8811 IEM_MC_END();
8812 return VINF_SUCCESS;
8813
8814 case IEMMODE_32BIT:
8815 IEM_MC_BEGIN(0, 2);
8816 IEM_MC_LOCAL(uint32_t, u32Value);
8817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8820 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8821 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8822 IEM_MC_ADVANCE_RIP();
8823 IEM_MC_END();
8824 return VINF_SUCCESS;
8825
8826 case IEMMODE_64BIT:
8827 IEM_MC_BEGIN(0, 2);
8828 IEM_MC_LOCAL(uint64_t, u64Value);
8829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8832 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8833 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8834 IEM_MC_ADVANCE_RIP();
8835 IEM_MC_END();
8836 return VINF_SUCCESS;
8837
8838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8839 }
8840 }
8841}
8842
8843
8844/** Opcode 0x0f 0xbf. */
8845FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8846{
8847 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8848 IEMOP_HLP_MIN_386();
8849
8850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8851
8852 /** @todo Not entirely sure how the operand size prefix is handled here,
8853 * assuming that it will be ignored. Would be nice to have a few
8854 * tests for this. */
8855 /*
8856 * If rm is denoting a register, no more instruction bytes.
8857 */
8858 if (IEM_IS_MODRM_REG_MODE(bRm))
8859 {
8860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8861 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8862 {
8863 IEM_MC_BEGIN(0, 1);
8864 IEM_MC_LOCAL(uint32_t, u32Value);
8865 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8866 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8867 IEM_MC_ADVANCE_RIP();
8868 IEM_MC_END();
8869 }
8870 else
8871 {
8872 IEM_MC_BEGIN(0, 1);
8873 IEM_MC_LOCAL(uint64_t, u64Value);
8874 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8875 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8876 IEM_MC_ADVANCE_RIP();
8877 IEM_MC_END();
8878 }
8879 }
8880 else
8881 {
8882 /*
8883 * We're loading a register from memory.
8884 */
8885 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8886 {
8887 IEM_MC_BEGIN(0, 2);
8888 IEM_MC_LOCAL(uint32_t, u32Value);
8889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8892 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8893 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8894 IEM_MC_ADVANCE_RIP();
8895 IEM_MC_END();
8896 }
8897 else
8898 {
8899 IEM_MC_BEGIN(0, 2);
8900 IEM_MC_LOCAL(uint64_t, u64Value);
8901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8905 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8906 IEM_MC_ADVANCE_RIP();
8907 IEM_MC_END();
8908 }
8909 }
8910 return VINF_SUCCESS;
8911}
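
/* Example (illustration): with BX=8000h, "movsx eax, bx" yields
 * EAX=0ffff8000h while "movzx eax, bx" (0f b7 above) yields EAX=00008000h;
 * the 64-bit forms extend all the way into the high dword. */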
8912
8913
8914/** Opcode 0x0f 0xc0. */
8915FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8916{
8917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8918 IEMOP_HLP_MIN_486();
8919 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8920
8921 /*
8922 * If rm is denoting a register, no more instruction bytes.
8923 */
8924 if (IEM_IS_MODRM_REG_MODE(bRm))
8925 {
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927
8928 IEM_MC_BEGIN(3, 0);
8929 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8930 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8931 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8932
8933 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8934 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
8935 IEM_MC_REF_EFLAGS(pEFlags);
8936 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8937
8938 IEM_MC_ADVANCE_RIP();
8939 IEM_MC_END();
8940 }
8941 else
8942 {
8943 /*
8944 * We're accessing memory.
8945 */
8946 IEM_MC_BEGIN(3, 3);
8947 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8948 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8949 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8950 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8952
8953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8954 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8955 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
8956 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8957 IEM_MC_FETCH_EFLAGS(EFlags);
8958 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8959 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8960 else
8961 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8962
8963 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8964 IEM_MC_COMMIT_EFLAGS(EFlags);
8965 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
8966 IEM_MC_ADVANCE_RIP();
8967 IEM_MC_END();
8968 return VINF_SUCCESS;
8969 }
8970 return VINF_SUCCESS;
8971}
8972
8973
8974/** Opcode 0x0f 0xc1. */
8975FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8976{
8977 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8978 IEMOP_HLP_MIN_486();
8979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8980
8981 /*
8982 * If rm is denoting a register, no more instruction bytes.
8983 */
8984 if (IEM_IS_MODRM_REG_MODE(bRm))
8985 {
8986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8987
8988 switch (pVCpu->iem.s.enmEffOpSize)
8989 {
8990 case IEMMODE_16BIT:
8991 IEM_MC_BEGIN(3, 0);
8992 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8993 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8995
8996 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8997 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
8998 IEM_MC_REF_EFLAGS(pEFlags);
8999 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9000
9001 IEM_MC_ADVANCE_RIP();
9002 IEM_MC_END();
9003 return VINF_SUCCESS;
9004
9005 case IEMMODE_32BIT:
9006 IEM_MC_BEGIN(3, 0);
9007 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9008 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9009 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9010
9011 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9012 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9013 IEM_MC_REF_EFLAGS(pEFlags);
9014 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9015
9016 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9017 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9018 IEM_MC_ADVANCE_RIP();
9019 IEM_MC_END();
9020 return VINF_SUCCESS;
9021
9022 case IEMMODE_64BIT:
9023 IEM_MC_BEGIN(3, 0);
9024 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9025 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9026 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9027
9028 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9029 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9030 IEM_MC_REF_EFLAGS(pEFlags);
9031 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9032
9033 IEM_MC_ADVANCE_RIP();
9034 IEM_MC_END();
9035 return VINF_SUCCESS;
9036
9037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9038 }
9039 }
9040 else
9041 {
9042 /*
9043 * We're accessing memory.
9044 */
9045 switch (pVCpu->iem.s.enmEffOpSize)
9046 {
9047 case IEMMODE_16BIT:
9048 IEM_MC_BEGIN(3, 3);
9049 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9050 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9051 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9052 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9054
9055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9056 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9057 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9058 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9059 IEM_MC_FETCH_EFLAGS(EFlags);
9060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9061 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9062 else
9063 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9064
9065 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9066 IEM_MC_COMMIT_EFLAGS(EFlags);
9067 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9068 IEM_MC_ADVANCE_RIP();
9069 IEM_MC_END();
9070 return VINF_SUCCESS;
9071
9072 case IEMMODE_32BIT:
9073 IEM_MC_BEGIN(3, 3);
9074 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9075 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9076 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9077 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9079
9080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9081 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9082 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9083 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9084 IEM_MC_FETCH_EFLAGS(EFlags);
9085 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9086 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9087 else
9088 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9089
9090 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9091 IEM_MC_COMMIT_EFLAGS(EFlags);
9092 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9093 IEM_MC_ADVANCE_RIP();
9094 IEM_MC_END();
9095 return VINF_SUCCESS;
9096
9097 case IEMMODE_64BIT:
9098 IEM_MC_BEGIN(3, 3);
9099 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9100 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9101 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9102 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9104
9105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9106 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9107 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9108 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9109 IEM_MC_FETCH_EFLAGS(EFlags);
9110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9111 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9112 else
9113 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9114
9115 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9116 IEM_MC_COMMIT_EFLAGS(EFlags);
9117 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9118 IEM_MC_ADVANCE_RIP();
9119 IEM_MC_END();
9120 return VINF_SUCCESS;
9121
9122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9123 }
9124 }
9125}
9126
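/* For reference only, a sketch of the xadd operation implemented by the
   workers called above (illustrative names; the EFLAGS update and the
   locked variants are left out):
       uTmp   = *puDst;
       *puDst = uTmp + *puReg;
       *puReg = uTmp;
   The arithmetic flags are set as for an ADD of the two original values. */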
9127
9128/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9129FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9130/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9131FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9132/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9133FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9134/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9135FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9136
9137
9138/** Opcode 0x0f 0xc3. */
9139FNIEMOP_DEF(iemOp_movnti_My_Gy)
9140{
9141 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9142
9143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9144
9145 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9146 if (IEM_IS_MODRM_MEM_MODE(bRm))
9147 {
9148 switch (pVCpu->iem.s.enmEffOpSize)
9149 {
9150 case IEMMODE_32BIT:
9151 IEM_MC_BEGIN(0, 2);
9152 IEM_MC_LOCAL(uint32_t, u32Value);
9153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9154
9155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9157 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9158 return IEMOP_RAISE_INVALID_OPCODE();
9159
9160 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9161 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9162 IEM_MC_ADVANCE_RIP();
9163 IEM_MC_END();
9164 break;
9165
9166 case IEMMODE_64BIT:
9167 IEM_MC_BEGIN(0, 2);
9168 IEM_MC_LOCAL(uint64_t, u64Value);
9169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9170
9171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9173 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9174 return IEMOP_RAISE_INVALID_OPCODE();
9175
9176 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9177 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9178 IEM_MC_ADVANCE_RIP();
9179 IEM_MC_END();
9180 break;
9181
9182 case IEMMODE_16BIT:
9183 /** @todo check this form. */
9184 return IEMOP_RAISE_INVALID_OPCODE();
9185 }
9186 }
9187 else
9188 return IEMOP_RAISE_INVALID_OPCODE();
9189 return VINF_SUCCESS;
9190}
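
/* Note (informational): movnti is a non-temporal store hint; the hint has no
   architectural side effects, so emulating it as the plain store above is
   sufficient. */
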
9191/* Opcode 0x66 0x0f 0xc3 - invalid */
9192/* Opcode 0xf3 0x0f 0xc3 - invalid */
9193/* Opcode 0xf2 0x0f 0xc3 - invalid */
9194
9195/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9196FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9197/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9198FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9199/* Opcode 0xf3 0x0f 0xc4 - invalid */
9200/* Opcode 0xf2 0x0f 0xc4 - invalid */
9201
9202/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9203FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9204/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9205FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9206/* Opcode 0xf3 0x0f 0xc5 - invalid */
9207/* Opcode 0xf2 0x0f 0xc5 - invalid */
9208
9209/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9210FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9211/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9212FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9213/* Opcode 0xf3 0x0f 0xc6 - invalid */
9214/* Opcode 0xf2 0x0f 0xc6 - invalid */
9215
9216
9217/** Opcode 0x0f 0xc7 !11/1. */
9218FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9219{
9220 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
9221
9222 IEM_MC_BEGIN(4, 3);
9223 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9224 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9225 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9226 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9227 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9228 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9230
9231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9232 IEMOP_HLP_DONE_DECODING();
9233 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9234
9235 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9236 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9237 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9238
9239 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9240 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9241 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9242
9243 IEM_MC_FETCH_EFLAGS(EFlags);
9244 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9245 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9246 else
9247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9248
9249 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9250 IEM_MC_COMMIT_EFLAGS(EFlags);
9251 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9252 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9253 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9254 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9255 IEM_MC_ENDIF();
9256 IEM_MC_ADVANCE_RIP();
9257
9258 IEM_MC_END();
9259 return VINF_SUCCESS;
9260}
9261
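/* For reference only, the cmpxchg8b semantics per the SDM in sketch form:
       if (EDX:EAX == [mem64]) { ZF = 1; [mem64] = ECX:EBX; }
       else                    { ZF = 0; EDX:EAX = [mem64]; }
   This is why the IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) block above only
   writes EAX/EDX back after a failed comparison. */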
9262
9263/** Opcode REX.W 0x0f 0xc7 !11/1. */
9264FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9265{
9266 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
9267 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9268 {
9269#if 0
9270 RT_NOREF(bRm);
9271 IEMOP_BITCH_ABOUT_STUB();
9272 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9273#else
9274 IEM_MC_BEGIN(4, 3);
9275 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9276 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9277 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9278 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9279 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9280 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9282
9283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9284 IEMOP_HLP_DONE_DECODING();
9285 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9286 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9287
9288 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9289 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9290 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9291
9292 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9293 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9294 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9295
9296 IEM_MC_FETCH_EFLAGS(EFlags);
9297# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9298# if defined(RT_ARCH_AMD64)
9299 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9300# endif
9301 {
9302 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9303 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9304 else
9305 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9306 }
9307# if defined(RT_ARCH_AMD64)
9308 else
9309# endif
9310# endif
9311# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9312 {
9313 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
9314 accesses and not at all atomic, which works fine in a UNI CPU guest
9315 configuration (ignoring DMA). If guest SMP is active we have no choice
9316 but to use a rendezvous callback here. Sigh. */
9317 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9318 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9319 else
9320 {
9321 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9322 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9323 }
9324 }
9325# endif
9326
9327 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9328 IEM_MC_COMMIT_EFLAGS(EFlags);
9329 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9330 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9331 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9332 IEM_MC_ENDIF();
9333 IEM_MC_ADVANCE_RIP();
9334
9335 IEM_MC_END();
9336 return VINF_SUCCESS;
9337#endif
9338 }
9339 Log(("cmpxchg16b -> #UD\n"));
9340 return IEMOP_RAISE_INVALID_OPCODE();
9341}
9342
9343FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9344{
9345 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9346 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9347 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9348}
9349
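/* Encoding note (informational): 0f c7 /1 with a memory operand is
   cmpxchg8b m64, while the same opcode with REX.W (e.g. 48 0f c7 /1)
   selects cmpxchg16b m128, as the dispatcher above implements. */
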
9350/** Opcode 0x0f 0xc7 11/6. */
9351FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9352
9353/** Opcode 0x0f 0xc7 !11/6. */
9354#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9355FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9356{
9357 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9358 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9359 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9360 IEM_MC_BEGIN(2, 0);
9361 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9362 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9364 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9365 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9366 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9367 IEM_MC_END();
9368 return VINF_SUCCESS;
9369}
9370#else
9371FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9372#endif
9373
9374/** Opcode 0x66 0x0f 0xc7 !11/6. */
9375#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9376FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9377{
9378 IEMOP_MNEMONIC(vmclear, "vmclear");
9379 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9380 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9381 IEM_MC_BEGIN(2, 0);
9382 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9383 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9385 IEMOP_HLP_DONE_DECODING();
9386 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9387 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9388 IEM_MC_END();
9389 return VINF_SUCCESS;
9390}
9391#else
9392FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9393#endif
9394
9395/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9396#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9397FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9398{
9399 IEMOP_MNEMONIC(vmxon, "vmxon");
9400 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9401 IEM_MC_BEGIN(2, 0);
9402 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9403 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9405 IEMOP_HLP_DONE_DECODING();
9406 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9407 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9408 IEM_MC_END();
9409 return VINF_SUCCESS;
9410}
9411#else
9412FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9413#endif
9414
9415/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9416#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9417FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9418{
9419 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9420 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9421 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9422 IEM_MC_BEGIN(2, 0);
9423 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9424 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9426 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9428 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9429 IEM_MC_END();
9430 return VINF_SUCCESS;
9431}
9432#else
9433FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9434#endif
9435
9436/** Opcode 0x0f 0xc7 11/7. */
9437FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9438
9439
9440/**
9441 * Group 9 jump table for register variant.
9442 */
9443IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9444{ /* pfx: none, 066h, 0f3h, 0f2h */
9445 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9446 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9447 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9448 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9449 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9450 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9451 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9452 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9453};
9454AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9455
9456
9457/**
9458 * Group 9 jump table for memory variant.
9459 */
9460IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9461{ /* pfx: none, 066h, 0f3h, 0f2h */
9462 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9463 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9464 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9465 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9466 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9467 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9468 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9469 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9470};
9471AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9472
9473
9474/** Opcode 0x0f 0xc7. */
9475FNIEMOP_DEF(iemOp_Grp9)
9476{
9477 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9478 if (IEM_IS_MODRM_REG_MODE(bRm))
9479 /* register, register */
9480 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9481 + pVCpu->iem.s.idxPrefix], bRm);
9482 /* memory, register */
9483 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9484 + pVCpu->iem.s.idxPrefix], bRm);
9485}
9486
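/* Indexing example for the tables above (informational): for 0f c7 /6 with
   a 066h prefix and a memory operand, IEM_GET_MODRM_REG_8(bRm) yields 6 and
   idxPrefix is 1, so entry 6*4 + 1 = iemOp_Grp9_vmclear_Mq gets invoked. */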
9487
9488/**
9489 * Common 'bswap register' helper.
9490 */
9491FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9492{
9493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9494 switch (pVCpu->iem.s.enmEffOpSize)
9495 {
9496 case IEMMODE_16BIT:
9497 IEM_MC_BEGIN(1, 0);
9498 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9499 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9500 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9501 IEM_MC_ADVANCE_RIP();
9502 IEM_MC_END();
9503 return VINF_SUCCESS;
9504
9505 case IEMMODE_32BIT:
9506 IEM_MC_BEGIN(1, 0);
9507 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9508 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9509 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9510 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9511 IEM_MC_ADVANCE_RIP();
9512 IEM_MC_END();
9513 return VINF_SUCCESS;
9514
9515 case IEMMODE_64BIT:
9516 IEM_MC_BEGIN(1, 0);
9517 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9518 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9519 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9520 IEM_MC_ADVANCE_RIP();
9521 IEM_MC_END();
9522 return VINF_SUCCESS;
9523
9524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9525 }
9526}
9527
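/* For reference only, a sketch of the 32-bit byte swap performed by the
   worker called above (illustrative names; the 16-bit form is
   architecturally undefined):
       uResult =  (uVal << 24)
               | ((uVal <<  8) & UINT32_C(0x00ff0000))
               | ((uVal >>  8) & UINT32_C(0x0000ff00))
               |  (uVal >> 24); */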
9528
9529/** Opcode 0x0f 0xc8. */
9530FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9531{
9532 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9533 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9534 prefix. REX.B appears to be the correct prefix, however. For a parallel
9535 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9536 IEMOP_HLP_MIN_486();
9537 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9538}
9539
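/* Encoding example (informational): 0f c8 is bswap eax, 41 0f c8 (REX.B) is
   bswap r8d, and 48 0f c8 (REX.W) is bswap rax, matching the uRexB handling
   here and in the functions below. */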
9540
9541/** Opcode 0x0f 0xc9. */
9542FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9543{
9544 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9545 IEMOP_HLP_MIN_486();
9546 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9547}
9548
9549
9550/** Opcode 0x0f 0xca. */
9551FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9552{
9553 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9554 IEMOP_HLP_MIN_486();
9555 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9556}
9557
9558
9559/** Opcode 0x0f 0xcb. */
9560FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9561{
9562 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9563 IEMOP_HLP_MIN_486();
9564 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9565}
9566
9567
9568/** Opcode 0x0f 0xcc. */
9569FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9570{
9571 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9572 IEMOP_HLP_MIN_486();
9573 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9574}
9575
9576
9577/** Opcode 0x0f 0xcd. */
9578FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9579{
9580 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9581 IEMOP_HLP_MIN_486();
9582 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9583}
9584
9585
9586/** Opcode 0x0f 0xce. */
9587FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9588{
9589 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9590 IEMOP_HLP_MIN_486();
9591 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9592}
9593
9594
9595/** Opcode 0x0f 0xcf. */
9596FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9597{
9598 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9599 IEMOP_HLP_MIN_486();
9600 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9601}
9602
9603
9604/* Opcode 0x0f 0xd0 - invalid */
9605/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9606FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9607/* Opcode 0xf3 0x0f 0xd0 - invalid */
9608/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9609FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9610
9611/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9612FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
9613{
9614 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9615 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
9616}
9617
9618/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
9619FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
9620{
9621 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9622 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
9623}
9624
9625/* Opcode 0xf3 0x0f 0xd1 - invalid */
9626/* Opcode 0xf2 0x0f 0xd1 - invalid */
9627
9628/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9629FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
9630{
9631 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9632 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
9633}
9634
9635
9636/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9637FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
9638{
9639 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9640 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
9641}
9642
9643
9644/* Opcode 0xf3 0x0f 0xd2 - invalid */
9645/* Opcode 0xf2 0x0f 0xd2 - invalid */
9646
9647/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9648FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
9649{
9650 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9651 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
9652}
9653
9654
9655/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9656FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
9657{
9658 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9659 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
9660}
9661
9662
9663/* Opcode 0xf3 0x0f 0xd3 - invalid */
9664/* Opcode 0xf2 0x0f 0xd3 - invalid */
9665
9666
9667/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9668FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
9669{
9670 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9671 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
9672}
9673
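/* Note (informational): paddq on MMX registers was only introduced with
   SSE2, which is why the _Ex worker with the fSse2 feature gate is used
   above (psubq further down follows the same pattern). */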
9674
9675/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
9676FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
9677{
9678 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9679 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
9680}
9681
9682
9683/* Opcode 0xf3 0x0f 0xd4 - invalid */
9684/* Opcode 0xf2 0x0f 0xd4 - invalid */
9685
9686/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9687FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
9688{
9689 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9690 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
9691}
9692
9693/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9694FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
9695{
9696 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9697 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
9698}
9699
9700
9701/* Opcode 0xf3 0x0f 0xd5 - invalid */
9702/* Opcode 0xf2 0x0f 0xd5 - invalid */
9703
9704/* Opcode 0x0f 0xd6 - invalid */
9705
9706/**
9707 * @opcode 0xd6
9708 * @oppfx 0x66
9709 * @opcpuid sse2
9710 * @opgroup og_sse2_pcksclr_datamove
9711 * @opxcpttype none
9712 * @optest op1=-1 op2=2 -> op1=2
9713 * @optest op1=0 op2=-42 -> op1=-42
9714 */
9715FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9716{
9717 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9719 if (IEM_IS_MODRM_REG_MODE(bRm))
9720 {
9721 /*
9722 * Register, register.
9723 */
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEM_MC_BEGIN(0, 2);
9726 IEM_MC_LOCAL(uint64_t, uSrc);
9727
9728 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9729 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9730
9731 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9732 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
9733
9734 IEM_MC_ADVANCE_RIP();
9735 IEM_MC_END();
9736 }
9737 else
9738 {
9739 /*
9740 * Memory, register.
9741 */
9742 IEM_MC_BEGIN(0, 2);
9743 IEM_MC_LOCAL(uint64_t, uSrc);
9744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9745
9746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9748 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9749 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9750
9751 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9752 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9753
9754 IEM_MC_ADVANCE_RIP();
9755 IEM_MC_END();
9756 }
9757 return VINF_SUCCESS;
9758}
9759
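/* Note (informational): in the register form above the destination XMM
   register receives the source qword zero-extended to 128 bits (hence the
   WqZxReg_WO operand annotation), while the memory form is a plain qword
   store. */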
9760
9761/**
9762 * @opcode 0xd6
9763 * @opcodesub 11 mr/reg
9764 * @oppfx f3
9765 * @opcpuid sse2
9766 * @opgroup og_sse2_simdint_datamove
9767 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9768 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9769 */
9770FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9771{
9772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9773 if (IEM_IS_MODRM_REG_MODE(bRm))
9774 {
9775 /*
9776 * Register, register.
9777 */
9778 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9780 IEM_MC_BEGIN(0, 1);
9781 IEM_MC_LOCAL(uint64_t, uSrc);
9782
9783 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9784 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9785
9786 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
9787 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
9788 IEM_MC_FPU_TO_MMX_MODE();
9789
9790 IEM_MC_ADVANCE_RIP();
9791 IEM_MC_END();
9792 return VINF_SUCCESS;
9793 }
9794
9795 /**
9796 * @opdone
9797 * @opmnemonic udf30fd6mem
9798 * @opcode 0xd6
9799 * @opcodesub !11 mr/reg
9800 * @oppfx f3
9801 * @opunused intel-modrm
9802 * @opcpuid sse
9803 * @optest ->
9804 */
9805 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9806}
9807
9808
9809/**
9810 * @opcode 0xd6
9811 * @opcodesub 11 mr/reg
9812 * @oppfx f2
9813 * @opcpuid sse2
9814 * @opgroup og_sse2_simdint_datamove
9815 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9816 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9817 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9818 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9819 * @optest op1=-42 op2=0xfedcba9876543210
9820 * -> op1=0xfedcba9876543210 ftw=0xff
9821 */
9822FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9823{
9824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9825 if (IEM_IS_MODRM_REG_MODE(bRm))
9826 {
9827 /*
9828 * Register, register.
9829 */
9830 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9832 IEM_MC_BEGIN(0, 1);
9833 IEM_MC_LOCAL(uint64_t, uSrc);
9834
9835 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9837
9838 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9839 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
9840 IEM_MC_FPU_TO_MMX_MODE();
9841
9842 IEM_MC_ADVANCE_RIP();
9843 IEM_MC_END();
9844 return VINF_SUCCESS;
9845 }
9846
9847 /**
9848 * @opdone
9849 * @opmnemonic udf20fd6mem
9850 * @opcode 0xd6
9851 * @opcodesub !11 mr/reg
9852 * @oppfx f2
9853 * @opunused intel-modrm
9854 * @opcpuid sse
9855 * @optest ->
9856 */
9857 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9858}
9859
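/* Note (informational): movq2dq and movdq2q above move a qword between the
   MMX and XMM register files; both are defined for the register form only
   and both switch the FPU to MMX mode, which the ftw=0xff expectations in
   the @optest lines reflect. */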
9860
9861/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9862FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9863{
9864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9865 /* Docs say register only. */
9866 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9867 {
9868 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9869 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
9870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9871 IEM_MC_BEGIN(2, 0);
9872 IEM_MC_ARG(uint64_t *, puDst, 0);
9873 IEM_MC_ARG(uint64_t const *, puSrc, 1);
9874 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9875 IEM_MC_PREPARE_FPU_USAGE();
9876 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
9877 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
9878 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
9879 IEM_MC_FPU_TO_MMX_MODE();
9880 IEM_MC_ADVANCE_RIP();
9881 IEM_MC_END();
9882 return VINF_SUCCESS;
9883 }
9884 return IEMOP_RAISE_INVALID_OPCODE();
9885}
9886
9887
9888/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9889FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9890{
9891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9892 /* Docs say register only. */
9893 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9894 {
9895 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9896 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
9897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9898 IEM_MC_BEGIN(2, 0);
9899 IEM_MC_ARG(uint64_t *, puDst, 0);
9900 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
9901 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9902 IEM_MC_PREPARE_SSE_USAGE();
9903 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
9904 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9905 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
9906 IEM_MC_ADVANCE_RIP();
9907 IEM_MC_END();
9908 return VINF_SUCCESS;
9909 }
9910 return IEMOP_RAISE_INVALID_OPCODE();
9911}
9912
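/* For reference only, a sketch of what the pmovmskb workers used above
   compute (64-bit form shown, illustrative names): bit i of the destination
   receives the most significant bit of source byte i, the rest is cleared:
       uDst = 0;
       for (unsigned i = 0; i < 8; i++)
           uDst |= ((uSrc >> (i * 8 + 7)) & 1) << i;  */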
9913
9914/* Opcode 0xf3 0x0f 0xd7 - invalid */
9915/* Opcode 0xf2 0x0f 0xd7 - invalid */
9916
9917
9918/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9919FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
9920{
9921 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9922 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
9923}
9924
9925
9926/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
9927FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
9928{
9929 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9930 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
9931}
9932
9933
9934/* Opcode 0xf3 0x0f 0xd8 - invalid */
9935/* Opcode 0xf2 0x0f 0xd8 - invalid */
9936
9937/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9938FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
9939{
9940 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9941 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
9942}
9943
9944
9945/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9946FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
9947{
9948 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9949 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
9950}
9951
9952
9953/* Opcode 0xf3 0x0f 0xd9 - invalid */
9954/* Opcode 0xf2 0x0f 0xd9 - invalid */
9955
9956/** Opcode 0x0f 0xda - pminub Pq, Qq */
9957FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
9958{
9959 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
9960 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
9961}
9962
9963
9964/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9965FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
9966{
9967 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9968 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
9969}
9970
9971/* Opcode 0xf3 0x0f 0xda - invalid */
9972/* Opcode 0xf2 0x0f 0xda - invalid */
9973
9974/** Opcode 0x0f 0xdb - pand Pq, Qq */
9975FNIEMOP_DEF(iemOp_pand_Pq_Qq)
9976{
9977 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9978 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
9979}
9980
9981
9982/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
9983FNIEMOP_DEF(iemOp_pand_Vx_Wx)
9984{
9985 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9986 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
9987}
9988
9989
9990/* Opcode 0xf3 0x0f 0xdb - invalid */
9991/* Opcode 0xf2 0x0f 0xdb - invalid */
9992
9993/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9994FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
9995{
9996 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9997 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
9998}
9999
10000
10001/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10002FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10003{
10004 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10005 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10006}
10007
10008
10009/* Opcode 0xf3 0x0f 0xdc - invalid */
10010/* Opcode 0xf2 0x0f 0xdc - invalid */
10011
10012/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10013FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10014{
10015 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10016 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10017}
10018
10019
10020/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
10021FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
10022{
10023 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10024 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
10025}
10026
10027
10028/* Opcode 0xf3 0x0f 0xdd - invalid */
10029/* Opcode 0xf2 0x0f 0xdd - invalid */
10030
10031/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
10032FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
10033{
10034 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10035 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
10036}
10037
10038
10039/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
10040FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
10041{
10042 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10043 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
10044}
10045
10046/* Opcode 0xf3 0x0f 0xde - invalid */
10047/* Opcode 0xf2 0x0f 0xde - invalid */
10048
10049
10050/** Opcode 0x0f 0xdf - pandn Pq, Qq */
10051FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
10052{
10053 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10054 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
10055}
10056
10057
10058/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10059FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10060{
10061 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10062 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10063}
10064
10065
10066/* Opcode 0xf3 0x0f 0xdf - invalid */
10067/* Opcode 0xf2 0x0f 0xdf - invalid */
10068
10069/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10070FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
10071/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10072FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
10073/* Opcode 0xf3 0x0f 0xe0 - invalid */
10074/* Opcode 0xf2 0x0f 0xe0 - invalid */
10075
10076/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10077FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10078{
10079 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10080 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10081}
10082
10083
10084/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10085FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10086{
10087 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10088 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10089}
10090
10091/* Opcode 0xf3 0x0f 0xe1 - invalid */
10092/* Opcode 0xf2 0x0f 0xe1 - invalid */
10093
10094/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10095FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10096{
10097 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10098 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10099}
10100
10101
10102/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10103FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10104{
10105 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10106 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10107}
10108
10109/* Opcode 0xf3 0x0f 0xe2 - invalid */
10110/* Opcode 0xf2 0x0f 0xe2 - invalid */
10111
10112/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10113FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
10114/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10115FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
10116/* Opcode 0xf3 0x0f 0xe3 - invalid */
10117/* Opcode 0xf2 0x0f 0xe3 - invalid */
10118
10119/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10120FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
10121/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10122FNIEMOP_STUB(iemOp_pmulhuw_Vx_Wx);
10123/* Opcode 0xf3 0x0f 0xe4 - invalid */
10124/* Opcode 0xf2 0x0f 0xe4 - invalid */
10125
10126/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10127FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10128{
10129 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10130 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10131}
10132
10133
10134/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10135FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10136{
10137 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10138 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10139}
10140
10141
10142/* Opcode 0xf3 0x0f 0xe5 - invalid */
10143/* Opcode 0xf2 0x0f 0xe5 - invalid */
10144
10145/* Opcode 0x0f 0xe6 - invalid */
10146/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10147FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10148/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10149FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10150/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10151FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10152
10153
10154/**
10155 * @opcode 0xe7
10156 * @opcodesub !11 mr/reg
10157 * @oppfx none
10158 * @opcpuid sse
10159 * @opgroup og_sse1_cachect
10160 * @opxcpttype none
10161 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10162 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10163 */
10164FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10165{
10166 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10168 if (IEM_IS_MODRM_MEM_MODE(bRm))
10169 {
10170 /* Register, memory. */
10171 IEM_MC_BEGIN(0, 2);
10172 IEM_MC_LOCAL(uint64_t, uSrc);
10173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10174
10175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10177 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10178 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10179
10180 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10181 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10182 IEM_MC_FPU_TO_MMX_MODE();
10183
10184 IEM_MC_ADVANCE_RIP();
10185 IEM_MC_END();
10186 return VINF_SUCCESS;
10187 }
10188 /**
10189 * @opdone
10190 * @opmnemonic ud0fe7reg
10191 * @opcode 0xe7
10192 * @opcodesub 11 mr/reg
10193 * @oppfx none
10194 * @opunused immediate
10195 * @opcpuid sse
10196 * @optest ->
10197 */
10198 return IEMOP_RAISE_INVALID_OPCODE();
10199}
10200
10201/**
10202 * @opcode 0xe7
10203 * @opcodesub !11 mr/reg
10204 * @oppfx 0x66
10205 * @opcpuid sse2
10206 * @opgroup og_sse2_cachect
10207 * @opxcpttype 1
10208 * @optest op1=-1 op2=2 -> op1=2
10209 * @optest op1=0 op2=-42 -> op1=-42
10210 */
10211FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10212{
10213 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10215 if (IEM_IS_MODRM_MEM_MODE(bRm))
10216 {
10217 /* Register, memory. */
10218 IEM_MC_BEGIN(0, 2);
10219 IEM_MC_LOCAL(RTUINT128U, uSrc);
10220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10221
10222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10225 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10226
10227 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10228 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10229
10230 IEM_MC_ADVANCE_RIP();
10231 IEM_MC_END();
10232 return VINF_SUCCESS;
10233 }
10234
10235 /**
10236 * @opdone
10237 * @opmnemonic ud660fe7reg
10238 * @opcode 0xe7
10239 * @opcodesub 11 mr/reg
10240 * @oppfx 0x66
10241 * @opunused immediate
10242 * @opcpuid sse
10243 * @optest ->
10244 */
10245 return IEMOP_RAISE_INVALID_OPCODE();
10246}
10247
10248/* Opcode 0xf3 0x0f 0xe7 - invalid */
10249/* Opcode 0xf2 0x0f 0xe7 - invalid */
10250
10251
10252/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10253FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10254{
10255 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10256 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10257}
10258
10259
10260/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10261FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10262{
10263 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10264 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10265}
10266
10267
10268/* Opcode 0xf3 0x0f 0xe8 - invalid */
10269/* Opcode 0xf2 0x0f 0xe8 - invalid */
10270
10271/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10272FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10273{
10274 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10275 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10276}
10277
10278
10279/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10280FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10281{
10282 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10283 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10284}
10285
10286
10287/* Opcode 0xf3 0x0f 0xe9 - invalid */
10288/* Opcode 0xf2 0x0f 0xe9 - invalid */
10289
10290
10291/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10292FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
10293{
10294 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10295 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
10296}
10297
10298
10299/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10300FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
10301{
10302 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10303 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
10304}
10305
10306
10307/* Opcode 0xf3 0x0f 0xea - invalid */
10308/* Opcode 0xf2 0x0f 0xea - invalid */
10309
10310
10311/** Opcode 0x0f 0xeb - por Pq, Qq */
10312FNIEMOP_DEF(iemOp_por_Pq_Qq)
10313{
10314 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10315 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
10316}
10317
10318
10319/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
10320FNIEMOP_DEF(iemOp_por_Vx_Wx)
10321{
10322 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10323 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
10324}
10325
10326
10327/* Opcode 0xf3 0x0f 0xeb - invalid */
10328/* Opcode 0xf2 0x0f 0xeb - invalid */
10329
10330/** Opcode 0x0f 0xec - paddsb Pq, Qq */
10331FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
10332{
10333 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10334 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
10335}
10336
10337
10338/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
10339FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
10340{
10341 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10342 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
10343}
10344
10345
10346/* Opcode 0xf3 0x0f 0xec - invalid */
10347/* Opcode 0xf2 0x0f 0xec - invalid */
10348
10349/** Opcode 0x0f 0xed - paddsw Pq, Qq */
10350FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
10351{
10352 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10353 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
10354}
10355
10356
10357/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
10358FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
10359{
10360 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10361 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
10362}
10363
10364
10365/* Opcode 0xf3 0x0f 0xed - invalid */
10366/* Opcode 0xf2 0x0f 0xed - invalid */
10367
10368
10369/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
10370FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
10371{
10372 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10373 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
10374}
10375
10376
10377/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
10378FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
10379{
10380 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10381 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
10382}
10383
10384
10385/* Opcode 0xf3 0x0f 0xee - invalid */
10386/* Opcode 0xf2 0x0f 0xee - invalid */
10387
10388
10389/** Opcode 0x0f 0xef - pxor Pq, Qq */
10390FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
10391{
10392 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10393 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
10394}
10395
10396
10397/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
10398FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
10399{
10400 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10401 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
10402}
10403
10404
10405/* Opcode 0xf3 0x0f 0xef - invalid */
10406/* Opcode 0xf2 0x0f 0xef - invalid */
10407
10408/* Opcode 0x0f 0xf0 - invalid */
10409/* Opcode 0x66 0x0f 0xf0 - invalid */
10410/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
10411FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
10412
10413
10414/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
10415FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
10416{
10417 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10418 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
10419}
10420
10421
10422/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
10423FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
10424{
10425 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10426 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
10427}
10428
10429
10430/* Opcode 0xf2 0x0f 0xf1 - invalid */
10431
10432/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
10433FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
10434{
10435 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10436 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
10437}
10438
10439
10440/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
10441FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
10442{
10443 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10444 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
10445}
10446
10447
10448/* Opcode 0xf2 0x0f 0xf2 - invalid */
10449
10450/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
10451FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
10452{
10453 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10454 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
10455}
10456
10457
10458/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
10459FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
10460{
10461 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10462 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
10463}
10464
10465/* Opcode 0xf2 0x0f 0xf3 - invalid */
10466
10467/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10468FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
10469/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
10470FNIEMOP_STUB(iemOp_pmuludq_Vx_Wx);
10471/* Opcode 0xf2 0x0f 0xf4 - invalid */
10472
10473/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
10474FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
10475{
10476 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10477 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
10478}
10479
10480
10481/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
10482FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
10483{
10484 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10485 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
10486}
10487
10488/* Opcode 0xf2 0x0f 0xf5 - invalid */
10489
10490/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
10491FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
10492/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
10493FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
10494/* Opcode 0xf2 0x0f 0xf6 - invalid */
10495
10496/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
10497FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
10498/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
10499FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
10500/* Opcode 0xf2 0x0f 0xf7 - invalid */
10501
10502
10503/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
10504FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
10505{
10506 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10507 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
10508}
10509
10510
10511/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
10512FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
10513{
10514 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10515 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
10516}
10517
10518
10519/* Opcode 0xf2 0x0f 0xf8 - invalid */
10520
10521
10522/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
10523FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
10524{
10525 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10526 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
10527}
10528
10529
10530/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
10531FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
10532{
10533 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10534 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
10535}
10536
10537
10538/* Opcode 0xf2 0x0f 0xf9 - invalid */
10539
10540
10541/** Opcode 0x0f 0xfa - psubd Pq, Qq */
10542FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
10543{
10544 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10545 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
10546}
10547
10548
10549/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
10550FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
10551{
10552 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10553 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
10554}
10555
10556
10557/* Opcode 0xf2 0x0f 0xfa - invalid */
10558
10559
10560/** Opcode 0x0f 0xfb - psubq Pq, Qq */
10561FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
10562{
10563 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10564 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10565}
10566
10567
10568/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
10569FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
10570{
10571 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10572 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
10573}
10574
10575
10576/* Opcode 0xf2 0x0f 0xfb - invalid */
10577
10578
10579/** Opcode 0x0f 0xfc - paddb Pq, Qq */
10580FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
10581{
10582 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10583 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
10584}
10585
10586
10587/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
10588FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
10589{
10590 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10591 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
10592}
10593
10594
10595/* Opcode 0xf2 0x0f 0xfc - invalid */
10596
10597
10598/** Opcode 0x0f 0xfd - paddw Pq, Qq */
10599FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
10600{
10601 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10602 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
10603}
10604
10605
10606/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
10607FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
10608{
10609 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10610 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
10611}
10612
10613
10614/* Opcode 0xf2 0x0f 0xfd - invalid */
10615
10616
10617/** Opcode 0x0f 0xfe - paddd Pq, Qq */
10618FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
10619{
10620 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10621 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
10622}
10623
10624
10625/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
10626FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
10627{
10628 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10629 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
10630}
10631
10632
10633/* Opcode 0xf2 0x0f 0xfe - invalid */
10634
10635
10636/** Opcode **** 0x0f 0xff - UD0 */
10637FNIEMOP_DEF(iemOp_ud0)
10638{
10639 IEMOP_MNEMONIC(ud0, "ud0");
10640 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
10641 {
10642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
10643#ifndef TST_IEM_CHECK_MC
10644 if (IEM_IS_MODRM_MEM_MODE(bRm))
10645 {
10646 RTGCPTR GCPtrEff;
10647 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
10648 if (rcStrict != VINF_SUCCESS)
10649 return rcStrict;
10650 }
10651#endif
10652 IEMOP_HLP_DONE_DECODING();
10653 }
10654 return IEMOP_RAISE_INVALID_OPCODE();
10655}
10656
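/* Note (informational): on Intel CPUs ud0 consumes a ModR/M byte (and any
   displacement bytes), so the effective address calculation above is done
   purely to advance the decoder past the whole instruction before raising
   #UD; no memory is accessed. */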
10657
10658
10659/**
10660 * Two byte opcode map, first byte 0x0f.
10661 *
10662 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
10663 * check if it needs updating as well when making changes.
10664 */
10665IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
10666{
10667 /* no prefix, 066h prefix f3h prefix, f2h prefix */
10668 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
10669 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
10670 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
10671 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
10672 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
10673 /* 0x05 */ IEMOP_X4(iemOp_syscall),
10674 /* 0x06 */ IEMOP_X4(iemOp_clts),
10675 /* 0x07 */ IEMOP_X4(iemOp_sysret),
10676 /* 0x08 */ IEMOP_X4(iemOp_invd),
10677 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
10678 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
10679 /* 0x0b */ IEMOP_X4(iemOp_ud2),
10680 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
10681 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
10682 /* 0x0e */ IEMOP_X4(iemOp_femms),
10683 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
10684
10685 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
10686 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq,         iemOp_movsldup_Vdq_Wdq,     iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq,          iemOp_movlpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx,        iemOp_unpcklpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx,        iemOp_unpckhpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq,    iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq,          iemOp_movhpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd,             iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,             iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,             iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,             iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,             iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,               iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,             iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,               iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps,        iemOp_movapd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps,        iemOp_movapd_Wpd_Vpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi,      iemOp_cvtpi2pd_Vpd_Qpi,     iemOp_cvtsi2ss_Vss_Ey,      iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps,       iemOp_movntpd_Mpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps,     iemOp_cvttpd2pi_Ppi_Wpd,    iemOp_cvttss2si_Gy_Wss,     iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps,      iemOp_cvtpd2pi_Qpi_Wpd,     iemOp_cvtss2si_Gy_Wss,      iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss,       iemOp_ucomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss,        iemOp_comisd_Vsd_Wsd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups,       iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps,        iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps,       iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps,         iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps,         iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps,        iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps,          iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps,         iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps,         iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps,         iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps,      iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq,      iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps,         iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps,         iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps,         iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps,         iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd,       iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd,       iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd,       iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq,        iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq,         iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq,         iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq,         iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq,        iemOp_packuswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq,       iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq,       iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq,       iemOp_punpckhdq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd,        iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM,         iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM,         iemOp_punpckhqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey,          iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq,            iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib,       iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq,         iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq,         iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq,         iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms,                  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy,          iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM,         iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM,         iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd,          iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq,            iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,                  iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib,      iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib,     iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,       iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib,     iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,         iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,           iemOp_psrlw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,           iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,           iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,           iemOp_paddq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,          iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,         iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,        iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,         iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,         iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,          iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,            iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,         iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,         iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,          iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,           iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,           iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,           iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,           iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,           iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,         iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,          iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,         iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,          iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,          iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,          iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,          iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,             iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,          iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,          iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,          iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,            iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,           iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,           iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,           iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,         iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,         iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,          iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,        iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,           iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,           iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,           iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,           iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,           iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,           iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,           iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
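
/**
 * @note  Orientation sketch, not IEM code: the table above packs four entries
 *        per opcode, one per mandatory-prefix column in the order none, 0x66,
 *        0xF3, 0xF2 (e.g. the 0x51 row maps to sqrtps/sqrtpd/sqrtss/sqrtsd),
 *        which is what the 256 * 4 == 1024 AssertCompile verifies.  IEMOP_X4
 *        simply repeats one handler across all four columns for opcodes that
 *        ignore these prefixes (the cmovcc, jcc and setcc rows, for instance).
 *        The standalone snippet below illustrates the flat indexing scheme
 *        implied by that layout; the helper and constant names are
 *        illustrative assumptions, not IEM API:
 * @code
 * #include <stdint.h>
 * #include <stdio.h>
 *
 * enum { COL_NONE = 0, COL_66 = 1, COL_F3 = 2, COL_F2 = 3 }; // prefix columns
 *
 * // Flat index of the handler for a two-byte opcode + mandatory prefix.
 * static unsigned TwoByteMapIndex(uint8_t bOpcode, unsigned iPrefixCol)
 * {
 *     return (unsigned)bOpcode * 4 + iPrefixCol;
 * }
 *
 * int main(void)
 * {
 *     printf("%u\n", TwoByteMapIndex(0x58, COL_66)); // 353: the addpd entry
 *     printf("%u\n", TwoByteMapIndex(0xff, COL_F2)); // 1023: last of 1024 entries
 *     return 0;
 * }
 * @endcode
 */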

/** @} */